Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--  lib/sqlalchemy/engine/__init__.py          62
-rw-r--r--  lib/sqlalchemy/engine/base.py            3450
-rw-r--r--  lib/sqlalchemy/engine/characteristics.py   56
-rw-r--r--  lib/sqlalchemy/engine/create.py           743
-rw-r--r--  lib/sqlalchemy/engine/cursor.py          1942
-rw-r--r--  lib/sqlalchemy/engine/default.py         1936
-rw-r--r--  lib/sqlalchemy/engine/events.py           835
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py      1719
-rw-r--r--  lib/sqlalchemy/engine/mock.py             118
-rw-r--r--  lib/sqlalchemy/engine/reflection.py      1160
-rw-r--r--  lib/sqlalchemy/engine/result.py          1857
-rw-r--r--  lib/sqlalchemy/engine/row.py              621
-rw-r--r--  lib/sqlalchemy/engine/strategies.py        17
-rw-r--r--  lib/sqlalchemy/engine/url.py              806
-rw-r--r--  lib/sqlalchemy/engine/util.py             253
15 files changed, 15575 insertions(+), 0 deletions(-)
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
new file mode 100644
index 0000000..2437e17
--- /dev/null
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -0,0 +1,62 @@
+# engine/__init__.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""SQL connections, SQL execution and high-level DB-API interface.
+
+The engine package defines the basic components used to interface
+DB-API modules with higher-level statement construction,
+connection-management, execution and result contexts. The primary
+"entry point" class into this package is the Engine and its public
+constructor ``create_engine()``.
+
+"""
+
+from . import events
+from . import util
+from .base import Connection
+from .base import Engine
+from .base import NestedTransaction
+from .base import RootTransaction
+from .base import Transaction
+from .base import TwoPhaseTransaction
+from .create import create_engine
+from .create import engine_from_config
+from .cursor import BaseCursorResult
+from .cursor import BufferedColumnResultProxy
+from .cursor import BufferedColumnRow
+from .cursor import BufferedRowResultProxy
+from .cursor import CursorResult
+from .cursor import FullyBufferedResultProxy
+from .cursor import LegacyCursorResult
+from .cursor import ResultProxy
+from .interfaces import AdaptedConnection
+from .interfaces import Compiled
+from .interfaces import Connectable
+from .interfaces import CreateEnginePlugin
+from .interfaces import Dialect
+from .interfaces import ExceptionContext
+from .interfaces import ExecutionContext
+from .interfaces import TypeCompiler
+from .mock import create_mock_engine
+from .reflection import Inspector
+from .result import ChunkedIteratorResult
+from .result import FilterResult
+from .result import FrozenResult
+from .result import IteratorResult
+from .result import MappingResult
+from .result import MergedResult
+from .result import Result
+from .result import result_tuple
+from .result import ScalarResult
+from .row import BaseRow
+from .row import LegacyRow
+from .row import Row
+from .row import RowMapping
+from .url import make_url
+from .url import URL
+from .util import connection_memoize
+from ..sql import ddl
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
new file mode 100644
index 0000000..f126eb0
--- /dev/null
+++ b/lib/sqlalchemy/engine/base.py
@@ -0,0 +1,3450 @@
+# engine/base.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from __future__ import with_statement
+
+import contextlib
+import sys
+
+from .interfaces import Connectable
+from .interfaces import ExceptionContext
+from .util import _distill_params
+from .util import _distill_params_20
+from .util import TransactionalContext
+from .. import exc
+from .. import inspection
+from .. import log
+from .. import util
+from ..sql import compiler
+from ..sql import util as sql_util
+
+
+"""Defines :class:`_engine.Connection` and :class:`_engine.Engine`.
+
+"""
+
+_EMPTY_EXECUTION_OPTS = util.immutabledict()
+
+
+class Connection(Connectable):
+ """Provides high-level functionality for a wrapped DB-API connection.
+
+ **This is the SQLAlchemy 1.x.x version** of the :class:`_engine.Connection`
+ class. For the :term:`2.0 style` version, which features some API
+ differences, see :class:`_future.Connection`.
+
+ The :class:`_engine.Connection` object is procured by calling
+ the :meth:`_engine.Engine.connect` method of the :class:`_engine.Engine`
+ object, and provides services for execution of SQL statements as well
+ as transaction control.
+
+ The Connection object is **not** thread-safe. While a Connection can be
+ shared among threads using properly synchronized access, it is still
+ possible that the underlying DBAPI connection may not support shared
+ access between threads. Check the DBAPI documentation for details.
+
+ The Connection object represents a single DBAPI connection checked out
+ from the connection pool. In this state, the connection pool has no effect
+ upon the connection, including its expiration or timeout state. For the
+ connection pool to properly manage connections, connections should be
+ returned to the connection pool (i.e. ``connection.close()``) whenever the
+ connection is not in use.
+
+ .. index::
+ single: thread safety; Connection
+
+ """
+
+ _is_future = False
+ _sqla_logger_namespace = "sqlalchemy.engine.Connection"
+
+ # used by sqlalchemy.engine.util.TransactionalContext
+ _trans_context_manager = None
+
+ def __init__(
+ self,
+ engine,
+ connection=None,
+ close_with_result=False,
+ _branch_from=None,
+ _execution_options=None,
+ _dispatch=None,
+ _has_events=None,
+ _allow_revalidate=True,
+ ):
+ """Construct a new Connection."""
+ self.engine = engine
+ self.dialect = engine.dialect
+ self.__branch_from = _branch_from
+
+ if _branch_from:
+ # branching is always "from" the root connection
+ assert _branch_from.__branch_from is None
+ self._dbapi_connection = connection
+ self._execution_options = _execution_options
+ self._echo = _branch_from._echo
+ self.should_close_with_result = False
+ self.dispatch = _dispatch
+ self._has_events = _branch_from._has_events
+ else:
+ self._dbapi_connection = (
+ connection
+ if connection is not None
+ else engine.raw_connection()
+ )
+
+ self._transaction = self._nested_transaction = None
+ self.__savepoint_seq = 0
+ self.__in_begin = False
+ self.should_close_with_result = close_with_result
+
+ self.__can_reconnect = _allow_revalidate
+ self._echo = self.engine._should_log_info()
+
+ if _has_events is None:
+ # if _has_events is sent explicitly as False,
+ # then don't join the dispatch of the engine; we don't
+ # want to handle any of the engine's events in that case.
+ self.dispatch = self.dispatch._join(engine.dispatch)
+ self._has_events = _has_events or (
+ _has_events is None and engine._has_events
+ )
+
+ assert not _execution_options
+ self._execution_options = engine._execution_options
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.engine_connect(self, _branch_from is not None)
+
+ @util.memoized_property
+ def _message_formatter(self):
+ if "logging_token" in self._execution_options:
+ token = self._execution_options["logging_token"]
+ return lambda msg: "[%s] %s" % (token, msg)
+ else:
+ return None
+
+ def _log_info(self, message, *arg, **kw):
+ fmt = self._message_formatter
+
+ if fmt:
+ message = fmt(message)
+
+ if log.STACKLEVEL:
+ kw["stacklevel"] = 1 + log.STACKLEVEL_OFFSET
+
+ self.engine.logger.info(message, *arg, **kw)
+
+ def _log_debug(self, message, *arg, **kw):
+ fmt = self._message_formatter
+
+ if fmt:
+ message = fmt(message)
+
+ if log.STACKLEVEL:
+ kw["stacklevel"] = 1 + log.STACKLEVEL_OFFSET
+
+ self.engine.logger.debug(message, *arg, **kw)
+
+ @property
+ def _schema_translate_map(self):
+ return self._execution_options.get("schema_translate_map", None)
+
+ def schema_for_object(self, obj):
+ """Return the schema name for the given schema item taking into
+ account the current schema translate map.
+
+ """
+
+ name = obj.schema
+ schema_translate_map = self._execution_options.get(
+ "schema_translate_map", None
+ )
+
+ if (
+ schema_translate_map
+ and name in schema_translate_map
+ and obj._use_schema_map
+ ):
+ return schema_translate_map[name]
+ else:
+ return name
+
+ def _branch(self):
+ """Return a new Connection which references this Connection's
+ engine and connection; but does not have close_with_result enabled,
+ and also whose close() method does nothing.
+
+ .. deprecated:: 1.4 the "branching" concept will be removed in
+ SQLAlchemy 2.0 as well as the "Connection.connect()" method which
+ is the only consumer for this.
+
+ The Core uses this very sparingly, only in the case of
+ custom SQL default functions that are to be INSERTed as the
+ primary key of a row where we need to get the value back, so we have
+ to invoke it distinctly - this is a very uncommon case.
+
+ Userland code accesses _branch() when the connect()
+ method is called. The branched connection
+ acts as much as possible like the parent, except that it stays
+ connected when a close() event occurs.
+
+ """
+ return self.engine._connection_cls(
+ self.engine,
+ self._dbapi_connection,
+ _branch_from=self.__branch_from if self.__branch_from else self,
+ _execution_options=self._execution_options,
+ _has_events=self._has_events,
+ _dispatch=self.dispatch,
+ )
+
+ def _generate_for_options(self):
+ """define connection method chaining behavior for execution_options"""
+
+ if self._is_future:
+ return self
+ else:
+ c = self.__class__.__new__(self.__class__)
+ c.__dict__ = self.__dict__.copy()
+ return c
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type_, value, traceback):
+ self.close()
+
+ def execution_options(self, **opt):
+ r""" Set non-SQL options for the connection which take effect
+ during execution.
+
+ For a "future" style connection, this method returns this same
+ :class:`_future.Connection` object with the new options added.
+
+ For a legacy connection, this method returns a copy of this
+ :class:`_engine.Connection` which references the same underlying DBAPI
+ connection, but also defines the given execution options which will
+ take effect for a call to
+ :meth:`execute`. As the new :class:`_engine.Connection` references the
+ same underlying resource, it's usually a good idea to ensure that
+ the copies will be discarded immediately, which is implicit if used
+ as in::
+
+ result = connection.execution_options(stream_results=True).\
+ execute(stmt)
+
+ Note that any key/value can be passed to
+ :meth:`_engine.Connection.execution_options`,
+ and it will be stored in the
+ ``_execution_options`` dictionary of the :class:`_engine.Connection`.
+ It
+ is suitable for usage by end-user schemes to communicate with
+ event listeners, for example.
+
+ The keywords that are currently recognized by SQLAlchemy itself
+ include all those listed under :meth:`.Executable.execution_options`,
+ as well as others that are specific to :class:`_engine.Connection`.
+
+ :param autocommit: Available on: Connection, statement.
+ When True, a COMMIT will be invoked after execution
+ when executed in 'autocommit' mode, i.e. when an explicit
+ transaction is not begun on the connection. Note that this
+ is **library level, not DBAPI level autocommit**. The DBAPI
+ connection will remain in a real transaction unless the
+ "AUTOCOMMIT" isolation level is used.
+
+ .. deprecated:: 1.4 The "autocommit" execution option is deprecated
+ and will be removed in SQLAlchemy 2.0. See
+ :ref:`migration_20_autocommit` for discussion.
+
+ :param compiled_cache: Available on: Connection.
+ A dictionary where :class:`.Compiled` objects
+ will be cached when the :class:`_engine.Connection`
+ compiles a clause
+ expression into a :class:`.Compiled` object. This dictionary will
+ supersede the statement cache that may be configured on the
+ :class:`_engine.Engine` itself. If set to None, caching
+ is disabled, even if the engine has a configured cache size.
+
+ Note that the ORM makes use of its own "compiled" caches for
+ some operations, including flush operations. The caching
+ used by the ORM internally supersedes a cache dictionary
+ specified here.
+
+ :param logging_token: Available on: :class:`_engine.Connection`,
+ :class:`_engine.Engine`.
+
+ Adds the specified string token surrounded by brackets in log
+ messages logged by the connection, i.e. the logging that's enabled
+ either via the :paramref:`_sa.create_engine.echo` flag or via the
+ ``logging.getLogger("sqlalchemy.engine")`` logger. This allows a
+ per-connection or per-sub-engine token to be available which is
+ useful for debugging concurrent connection scenarios.
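+
+ E.g., a minimal sketch; the token strings here are arbitrary
+ examples::
+
+ conn_a = engine.connect().execution_options(logging_token="shard_a")
+ conn_b = engine.connect().execution_options(logging_token="shard_b")
+
+ # engine log lines for each connection are now prefixed with
+ # [shard_a] / [shard_b] respectively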
+
+ .. versionadded:: 1.4.0b2
+
+ .. seealso::
+
+ :ref:`dbengine_logging_tokens` - usage example
+
+ :paramref:`_sa.create_engine.logging_name` - adds a name to the
+ name used by the Python logger object itself.
+
+ :param isolation_level: Available on: :class:`_engine.Connection`.
+
+ Set the transaction isolation level for the lifespan of this
+ :class:`_engine.Connection` object.
+ Valid values include those string
+ values accepted by the :paramref:`_sa.create_engine.isolation_level`
+ parameter passed to :func:`_sa.create_engine`. These levels are
+ semi-database specific; see individual dialect documentation for
+ valid levels.
+
+ The isolation level option applies the isolation level by emitting
+ statements on the DBAPI connection, and **necessarily affects the
+ original Connection object overall**, not just the copy that is
+ returned by the call to :meth:`_engine.Connection.execution_options`
+ method. The isolation level will remain at the given setting until
+ the DBAPI connection itself is returned to the connection pool, i.e.
+ the :meth:`_engine.Connection.close` method on the original
+ :class:`_engine.Connection` is called,
+ where an event handler will emit
+ additional statements on the DBAPI connection in order to revert the
+ isolation level change.
+
+ .. warning:: The ``isolation_level`` execution option should
+ **not** be used when a transaction is already established, that
+ is, the :meth:`_engine.Connection.begin`
+ method or similar has been
+ called. A database cannot change the isolation level on a
+ transaction in progress, and different DBAPIs and/or
+ SQLAlchemy dialects may implicitly roll back or commit
+ the transaction, or not affect the connection at all.
+
+ .. note:: The ``isolation_level`` execution option is implicitly
+ reset if the :class:`_engine.Connection` is invalidated, e.g. via
+ the :meth:`_engine.Connection.invalidate` method, or if a
+ disconnection error occurs. The new connection produced after
+ the invalidation will not have the isolation level re-applied
+ to it automatically.
+
+ .. seealso::
+
+ :paramref:`_sa.create_engine.isolation_level`
+ - set per :class:`_engine.Engine` isolation level
+
+ :meth:`_engine.Connection.get_isolation_level`
+ - view current level
+
+ :ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
+
+ :ref:`PostgreSQL Transaction Isolation <postgresql_isolation_level>`
+
+ :ref:`MySQL Transaction Isolation <mysql_isolation_level>`
+
+ :ref:`SQL Server Transaction Isolation <mssql_isolation_level>`
+
+ :ref:`session_transaction_isolation` - for the ORM
+
+ :param no_parameters: When ``True``, if the final parameter
+ list or dictionary is totally empty, will invoke the
+ statement on the cursor as ``cursor.execute(statement)``,
+ not passing the parameter collection at all.
+ Some DBAPIs such as psycopg2 and mysql-python consider
+ percent signs as significant only when parameters are
+ present; this option allows code to generate SQL
+ containing percent signs (and possibly other characters)
+ that is neutral regarding whether it's executed by the DBAPI
+ or piped into a script that's later invoked by
+ command line tools.
+
+ :param stream_results: Available on: Connection, statement.
+ Indicate to the dialect that results should be
+ "streamed" and not pre-buffered, if possible. For backends
+ such as PostgreSQL, MySQL and MariaDB, this indicates the use of
+ a "server side cursor" as opposed to a client side cursor.
+ Other backends such as that of Oracle may already use server
+ side cursors by default.
+
+ The usage of
+ :paramref:`_engine.Connection.execution_options.stream_results` is
+ usually combined with setting a fixed number of rows to be fetched
+ in batches, to allow for efficient iteration of database rows while
+ at the same time not loading all result rows into memory at once;
+ this can be configured on a :class:`_engine.Result` object using the
+ :meth:`_engine.Result.yield_per` method, after execution has
+ returned a new :class:`_engine.Result`. If
+ :meth:`_engine.Result.yield_per` is not used,
+ the :paramref:`_engine.Connection.execution_options.stream_results`
+ mode of operation will instead use a dynamically sized buffer
+ which buffers sets of rows at a time, growing on each batch
+ based on a fixed growth size up until a limit which may
+ be configured using the
+ :paramref:`_engine.Connection.execution_options.max_row_buffer`
+ parameter.
+
+ When using the ORM to fetch ORM mapped objects from a result,
+ :meth:`_engine.Result.yield_per` should always be used with
+ :paramref:`_engine.Connection.execution_options.stream_results`,
+ so that the ORM does not fetch all rows into new ORM objects at once.
+
+ For typical use, the
+ :paramref:`_engine.Connection.execution_options.yield_per` execution
+ option should be preferred, which sets up both
+ :paramref:`_engine.Connection.execution_options.stream_results` and
+ :meth:`_engine.Result.yield_per` at once. This option is supported
+ both at a core level by :class:`_engine.Connection` as well as by the
+ ORM :class:`_engine.Session`; the latter is described at
+ :ref:`orm_queryguide_yield_per`.
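+
+ E.g., a sketch of the streaming pattern described above;
+ ``large_table`` is a hypothetical :class:`_schema.Table` and
+ ``process`` stands in for application logic::
+
+ with engine.connect() as conn:
+     result = conn.execution_options(stream_results=True).execute(
+         select(large_table)
+     )
+     # fetch rows in batches of 100 rather than all at once
+     for partition in result.yield_per(100).partitions():
+         process(partition)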
+
+ .. seealso::
+
+ :ref:`engine_stream_results` - background on
+ :paramref:`_engine.Connection.execution_options.stream_results`
+
+ :paramref:`_engine.Connection.execution_options.max_row_buffer`
+
+ :paramref:`_engine.Connection.execution_options.yield_per`
+
+ :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+ describing the ORM version of ``yield_per``
+
+ :param max_row_buffer: Available on: :class:`_engine.Connection`,
+ :class:`_sql.Executable`. Sets a maximum
+ buffer size to use when the
+ :paramref:`_engine.Connection.execution_options.stream_results`
+ execution option is used on a backend that supports server side
+ cursors. The default value if not specified is 1000.
+
+ .. seealso::
+
+ :paramref:`_engine.Connection.execution_options.stream_results`
+
+ :ref:`engine_stream_results`
+
+
+ :param yield_per: Available on: :class:`_engine.Connection`,
+ :class:`_sql.Executable`. Integer value applied which will
+ set the :paramref:`_engine.Connection.execution_options.stream_results`
+ execution option and invoke :meth:`_engine.Result.yield_per`
+ automatically at once. Allows equivalent functionality as
+ is present when using this parameter with the ORM.
+
+ .. versionadded:: 1.4.40
+
+ .. seealso::
+
+ :ref:`engine_stream_results` - background and examples
+ on using server side cursors with Core.
+
+ :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+ describing the ORM version of ``yield_per``
+
+ :param schema_translate_map: Available on: :class:`_engine.Connection`,
+ :class:`_engine.Engine`, :class:`_sql.Executable`.
+
+ A dictionary mapping schema names to schema names, that will be
+ applied to the :paramref:`_schema.Table.schema` element of each
+ :class:`_schema.Table`
+ encountered when SQL or DDL expression elements
+ are compiled into strings; the resulting schema name will be
+ converted based on presence in the map of the original name.
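+
+ E.g., a minimal sketch; ``user_table`` is a hypothetical
+ :class:`_schema.Table` declared without an explicit schema, so it
+ is matched by the ``None`` key in the map::
+
+ conn = conn.execution_options(
+     schema_translate_map={None: "tenant_one"}
+ )
+ conn.execute(select(user_table))  # table renders with schema "tenant_one"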
+
+ .. versionadded:: 1.1
+
+ .. seealso::
+
+ :ref:`schema_translating`
+
+ .. seealso::
+
+ :meth:`_engine.Engine.execution_options`
+
+ :meth:`.Executable.execution_options`
+
+ :meth:`_engine.Connection.get_execution_options`
+
+
+ """ # noqa
+ c = self._generate_for_options()
+ c._execution_options = c._execution_options.union(opt)
+ if self._has_events or self.engine._has_events:
+ self.dispatch.set_connection_execution_options(c, opt)
+ self.dialect.set_connection_execution_options(c, opt)
+ return c
+
+ def get_execution_options(self):
+ """Get the non-SQL options which will take effect during execution.
+
+ .. versionadded:: 1.3
+
+ .. seealso::
+
+ :meth:`_engine.Connection.execution_options`
+ """
+ return self._execution_options
+
+ @property
+ def closed(self):
+ """Return True if this connection is closed."""
+
+ # note this is independent for a "branched" connection vs.
+ # the base
+
+ return self._dbapi_connection is None and not self.__can_reconnect
+
+ @property
+ def invalidated(self):
+ """Return True if this connection was invalidated."""
+
+ # prior to 1.4, "invalid" was stored as a state independent of
+ # "closed", meaning an invalidated connection could be "closed",
+ # the _dbapi_connection would be None and closed=True, yet the
+ # "invalid" flag would stay True. This meant that there were
+ # three separate states (open/valid, closed/valid, closed/invalid)
+ # when there is really no reason for that; a connection that's
+ # "closed" does not need to be "invalid". So the state is now
+ # represented by the two facts alone.
+
+ if self.__branch_from:
+ return self.__branch_from.invalidated
+
+ return self._dbapi_connection is None and not self.closed
+
+ @property
+ def connection(self):
+ """The underlying DB-API connection managed by this Connection.
+
+ This is a SQLAlchemy connection-pool proxied connection
+ which then has the attribute
+ :attr:`_pool._ConnectionFairy.dbapi_connection` that refers to the
+ actual driver connection.
+
+ .. seealso::
+
+
+ :ref:`dbapi_connections`
+
+ """
+
+ if self._dbapi_connection is None:
+ try:
+ return self._revalidate_connection()
+ except (exc.PendingRollbackError, exc.ResourceClosedError):
+ raise
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+ else:
+ return self._dbapi_connection
+
+ def get_isolation_level(self):
+ """Return the current isolation level assigned to this
+ :class:`_engine.Connection`.
+
+ This will typically be the default isolation level as determined
+ by the dialect, unless the
+ :paramref:`.Connection.execution_options.isolation_level`
+ feature has been used to alter the isolation level on a
+ per-:class:`_engine.Connection` basis.
+
+ This method will typically perform a live SQL operation in order
+ to procure the current isolation level, so the value returned is the
+ actual level on the underlying DBAPI connection regardless of how
+ this state was set. Compare to the
+ :attr:`_engine.Connection.default_isolation_level` accessor
+ which returns the dialect-level setting without performing a SQL
+ query.
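+
+ E.g., a sketch; the value returned depends on the database and
+ its configuration::
+
+ with engine.connect() as conn:
+     print(conn.get_isolation_level())  # e.g. "READ COMMITTED"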
+
+ .. versionadded:: 0.9.9
+
+ .. seealso::
+
+ :attr:`_engine.Connection.default_isolation_level`
+ - view default level
+
+ :paramref:`_sa.create_engine.isolation_level`
+ - set per :class:`_engine.Engine` isolation level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`_engine.Connection` isolation level
+
+ """
+ try:
+ return self.dialect.get_isolation_level(self.connection)
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ @property
+ def default_isolation_level(self):
+ """The default isolation level assigned to this
+ :class:`_engine.Connection`.
+
+ This is the isolation level setting that the
+ :class:`_engine.Connection`
+ has when first procured via the :meth:`_engine.Engine.connect` method.
+ This level stays in place until the
+ :paramref:`.Connection.execution_options.isolation_level` is used
+ to change the setting on a per-:class:`_engine.Connection` basis.
+
+ Unlike :meth:`_engine.Connection.get_isolation_level`,
+ this attribute is set
+ ahead of time from the first connection procured by the dialect,
+ so a SQL query is not invoked when this accessor is called.
+
+ .. versionadded:: 0.9.9
+
+ .. seealso::
+
+ :meth:`_engine.Connection.get_isolation_level`
+ - view current level
+
+ :paramref:`_sa.create_engine.isolation_level`
+ - set per :class:`_engine.Engine` isolation level
+
+ :paramref:`.Connection.execution_options.isolation_level`
+ - set per :class:`_engine.Connection` isolation level
+
+ """
+ return self.dialect.default_isolation_level
+
+ def _invalid_transaction(self):
+ if self.invalidated:
+ raise exc.PendingRollbackError(
+ "Can't reconnect until invalid %stransaction is rolled "
+ "back."
+ % (
+ "savepoint "
+ if self._nested_transaction is not None
+ else ""
+ ),
+ code="8s2b",
+ )
+ else:
+ assert not self._is_future
+ raise exc.PendingRollbackError(
+ "This connection is on an inactive %stransaction. "
+ "Please rollback() fully before proceeding."
+ % (
+ "savepoint "
+ if self._nested_transaction is not None
+ else ""
+ ),
+ code="8s2a",
+ )
+
+ def _revalidate_connection(self):
+ if self.__branch_from:
+ return self.__branch_from._revalidate_connection()
+ if self.__can_reconnect and self.invalidated:
+ if self._transaction is not None:
+ self._invalid_transaction()
+ self._dbapi_connection = self.engine.raw_connection(
+ _connection=self
+ )
+ return self._dbapi_connection
+ raise exc.ResourceClosedError("This Connection is closed")
+
+ @property
+ def _still_open_and_dbapi_connection_is_valid(self):
+ return self._dbapi_connection is not None and getattr(
+ self._dbapi_connection, "is_valid", False
+ )
+
+ @property
+ def info(self):
+ """Info dictionary associated with the underlying DBAPI connection
+ referred to by this :class:`_engine.Connection`, allowing user-defined
+ data to be associated with the connection.
+
+ The data here will follow along with the DBAPI connection including
+ after it is returned to the connection pool and used again
+ in subsequent instances of :class:`_engine.Connection`.
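+
+ E.g., a sketch of attaching user-defined data; the key name is an
+ arbitrary example::
+
+ conn.info["request_id"] = "abc-123"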
+
+ """
+
+ return self.connection.info
+
+ @util.deprecated_20(":meth:`.Connection.connect`")
+ def connect(self, close_with_result=False):
+ """Returns a branched version of this :class:`_engine.Connection`.
+
+ The :meth:`_engine.Connection.close` method on the returned
+ :class:`_engine.Connection` can be called and this
+ :class:`_engine.Connection` will remain open.
+
+ This method provides usage symmetry with
+ :meth:`_engine.Engine.connect`, including for usage
+ with context managers.
+
+ """
+
+ return self._branch()
+
+ def invalidate(self, exception=None):
+ """Invalidate the underlying DBAPI connection associated with
+ this :class:`_engine.Connection`.
+
+ An attempt will be made to close the underlying DBAPI connection
+ immediately; however if this operation fails, the error is logged
+ but not raised. The connection is then discarded whether or not
+ close() succeeded.
+
+ Upon the next use (where "use" typically means using the
+ :meth:`_engine.Connection.execute` method or similar),
+ this :class:`_engine.Connection` will attempt to
+ procure a new DBAPI connection using the services of the
+ :class:`_pool.Pool` as a source of connectivity (e.g.
+ a "reconnection").
+
+ If a transaction was in progress (e.g. the
+ :meth:`_engine.Connection.begin` method has been called) when
+ :meth:`_engine.Connection.invalidate` method is called, at the DBAPI
+ level all state associated with this transaction is lost, as
+ the DBAPI connection is closed. The :class:`_engine.Connection`
+ will not allow a reconnection to proceed until the
+ :class:`.Transaction` object is ended, by calling the
+ :meth:`.Transaction.rollback` method; until that point, any attempt at
+ continuing to use the :class:`_engine.Connection` will raise an
+ :class:`~sqlalchemy.exc.InvalidRequestError`.
+ This is to prevent applications from accidentally
+ continuing ongoing transactional operations despite the
+ fact that the transaction has been lost due to an
+ invalidation.
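+
+ E.g., a sketch of the sequence described above::
+
+ trans = conn.begin()
+ conn.invalidate()  # DBAPI connection is discarded
+ trans.rollback()  # ends the transaction; reconnect is now allowed
+ conn.execute(text("select 1"))  # procures a new DBAPI connection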
+
+ The :meth:`_engine.Connection.invalidate` method,
+ just like auto-invalidation,
+ will at the connection pool level invoke the
+ :meth:`_events.PoolEvents.invalidate` event.
+
+ :param exception: an optional ``Exception`` instance that's the
+ reason for the invalidation. It is passed along to event handlers
+ and logging functions.
+
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
+
+ """
+
+ if self.__branch_from:
+ return self.__branch_from.invalidate(exception=exception)
+
+ if self.invalidated:
+ return
+
+ if self.closed:
+ raise exc.ResourceClosedError("This Connection is closed")
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ self._dbapi_connection.invalidate(exception)
+ self._dbapi_connection = None
+
+ def detach(self):
+ """Detach the underlying DB-API connection from its connection pool.
+
+ E.g.::
+
+ with engine.connect() as conn:
+ conn.detach()
+ conn.execute(text("SET search_path TO schema1, schema2"))
+
+ # work with connection
+
+ # connection is fully closed (since we used "with:", can
+ # also call .close())
+
+ This :class:`_engine.Connection` instance will remain usable.
+ When closed
+ (or exited from a context manager context as above),
+ the DB-API connection will be literally closed and not
+ returned to its originating pool.
+
+ This method can be used to insulate the rest of an application
+ from a modified state on a connection (such as a transaction
+ isolation level or similar).
+
+ """
+
+ self._dbapi_connection.detach()
+
+ def _autobegin(self):
+ self.begin()
+
+ def begin(self):
+ """Begin a transaction and return a transaction handle.
+
+ The returned object is an instance of :class:`.Transaction`.
+ This object represents the "scope" of the transaction,
+ which completes when either the :meth:`.Transaction.rollback`
+ or :meth:`.Transaction.commit` method is called.
+
+ .. tip::
+
+ The :meth:`_engine.Connection.begin` method is invoked when using
+ the :meth:`_engine.Engine.begin` context manager method as well.
+ All documentation that refers to behaviors specific to the
+ :meth:`_engine.Connection.begin` method also apply to use of the
+ :meth:`_engine.Engine.begin` method.
+
+ Legacy use: nested calls to :meth:`.begin` on the same
+ :class:`_engine.Connection` will return new :class:`.Transaction`
+ objects that represent an emulated transaction within the scope of the
+ enclosing transaction, that is::
+
+ trans = conn.begin() # outermost transaction
+ trans2 = conn.begin() # "nested"
+ trans2.commit() # does nothing
+ trans.commit() # actually commits
+
+ Calls to :meth:`.Transaction.commit` only have an effect
+ when invoked via the outermost :class:`.Transaction` object, though the
+ :meth:`.Transaction.rollback` method of any of the
+ :class:`.Transaction` objects will roll back the
+ transaction.
+
+ .. tip::
+
+ The above "nesting" behavior is a legacy behavior specific to
+ :term:`1.x style` use and will be removed in SQLAlchemy 2.0. For
+ notes on :term:`2.0 style` use, see
+ :meth:`_future.Connection.begin`.
+
+
+ .. seealso::
+
+ :meth:`_engine.Connection.begin_nested` - use a SAVEPOINT
+
+ :meth:`_engine.Connection.begin_twophase` -
+ use a two phase /XID transaction
+
+ :meth:`_engine.Engine.begin` - context manager available from
+ :class:`_engine.Engine`
+
+ """
+ if self._is_future:
+ assert not self.__branch_from
+ elif self.__branch_from:
+ return self.__branch_from.begin()
+
+ if self.__in_begin:
+ # for dialects that emit SQL within the process of
+ # dialect.do_begin() or dialect.do_begin_twophase(), this
+ # flag prevents "autobegin" from being emitted within that
+ # process, while allowing self._transaction to remain at None
+ # until it's complete.
+ return
+ elif self._transaction is None:
+ self._transaction = RootTransaction(self)
+ return self._transaction
+ else:
+ if self._is_future:
+ raise exc.InvalidRequestError(
+ "This connection has already initialized a SQLAlchemy "
+ "Transaction() object via begin() or autobegin; can't "
+ "call begin() here unless rollback() or commit() "
+ "is called first."
+ )
+ else:
+ return MarkerTransaction(self)
+
+ def begin_nested(self):
+ """Begin a nested transaction (i.e. SAVEPOINT) and return a
+ transaction handle, assuming an outer transaction is already
+ established.
+
+ Nested transactions require SAVEPOINT support in the
+ underlying database. Any transaction in the hierarchy may
+ ``commit`` and ``rollback``, however the outermost transaction
+ still controls the overall ``commit`` or ``rollback`` of the
+ transaction as a whole.
+
+ The legacy form of :meth:`_engine.Connection.begin_nested` method has
+ alternate behaviors based on whether or not the
+ :meth:`_engine.Connection.begin` method was called previously. If
+ :meth:`_engine.Connection.begin` was not called, then this method will
+ behave the same as the :meth:`_engine.Connection.begin` method and
+ return a :class:`.RootTransaction` object that begins and commits a
+ real transaction - **no savepoint is invoked**. If
+ :meth:`_engine.Connection.begin` **has** been called, and a
+ :class:`.RootTransaction` is already established, then this method
+ returns an instance of :class:`.NestedTransaction` which will invoke
+ and manage the scope of a SAVEPOINT.
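+
+ E.g., a sketch of the legacy pattern; ``some_table`` is a
+ hypothetical :class:`_schema.Table`::
+
+ with engine.connect() as conn:
+     trans = conn.begin()  # RootTransaction
+     savepoint = conn.begin_nested()  # emits SAVEPOINT
+     conn.execute(some_table.insert(), {"id": 1})
+     savepoint.rollback()  # rolls back to the SAVEPOINT only
+     trans.commit()  # commits the outer transaction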
+
+ .. tip::
+
+ The above mentioned behavior of
+ :meth:`_engine.Connection.begin_nested` is a legacy behavior
+ specific to :term:`1.x style` use. In :term:`2.0 style` use, the
+ :meth:`_future.Connection.begin_nested` method instead autobegins
+ the outer transaction that can be committed using
+ "commit-as-you-go" style; see
+ :meth:`_future.Connection.begin_nested` for migration details.
+
+ .. versionchanged:: 1.4.13 The behavior of
+ :meth:`_engine.Connection.begin_nested`
+ as returning a :class:`.RootTransaction` if
+ :meth:`_engine.Connection.begin` were not called has been restored
+ as was the case in 1.3.x versions; in previous 1.4.x versions, an
+ outer transaction would be "autobegun" but would not be committed.
+
+
+ .. seealso::
+
+ :meth:`_engine.Connection.begin`
+
+ :ref:`session_begin_nested` - ORM support for SAVEPOINT
+
+ """
+ if self._is_future:
+ assert not self.__branch_from
+ elif self.__branch_from:
+ return self.__branch_from.begin_nested()
+
+ if self._transaction is None:
+ if not self._is_future:
+ util.warn_deprecated_20(
+ "Calling Connection.begin_nested() in 2.0 style use will "
+ "return a NestedTransaction (SAVEPOINT) in all cases, "
+ "that will not commit the outer transaction. For code "
+ "that is cross-compatible between 1.x and 2.0 style use, "
+ "ensure Connection.begin() is called before calling "
+ "Connection.begin_nested()."
+ )
+ return self.begin()
+ else:
+ self._autobegin()
+
+ return NestedTransaction(self)
+
+ def begin_twophase(self, xid=None):
+ """Begin a two-phase or XA transaction and return a transaction
+ handle.
+
+ The returned object is an instance of :class:`.TwoPhaseTransaction`,
+ which in addition to the methods provided by
+ :class:`.Transaction`, also provides a
+ :meth:`~.TwoPhaseTransaction.prepare` method.
+
+ :param xid: the two phase transaction id. If not supplied, a
+ random id will be generated.
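+
+ E.g., a sketch of the two-phase sequence; ``some_table`` is a
+ hypothetical :class:`_schema.Table`::
+
+ with engine.connect() as conn:
+     xa = conn.begin_twophase()
+     conn.execute(some_table.insert(), {"id": 1})
+     xa.prepare()  # phase one
+     xa.commit()  # phase two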
+
+ .. seealso::
+
+ :meth:`_engine.Connection.begin`
+
+ :meth:`_engine.Connection.begin_nested`
+
+ """
+
+ if self.__branch_from:
+ return self.__branch_from.begin_twophase(xid=xid)
+
+ if self._transaction is not None:
+ raise exc.InvalidRequestError(
+ "Cannot start a two phase transaction when a transaction "
+ "is already in progress."
+ )
+ if xid is None:
+ xid = self.engine.dialect.create_xid()
+ return TwoPhaseTransaction(self, xid)
+
+ def recover_twophase(self):
+ return self.engine.dialect.do_recover_twophase(self)
+
+ def rollback_prepared(self, xid, recover=False):
+ self.engine.dialect.do_rollback_twophase(self, xid, recover=recover)
+
+ def commit_prepared(self, xid, recover=False):
+ self.engine.dialect.do_commit_twophase(self, xid, recover=recover)
+
+ def in_transaction(self):
+ """Return True if a transaction is in progress."""
+ if self.__branch_from is not None:
+ return self.__branch_from.in_transaction()
+
+ return self._transaction is not None and self._transaction.is_active
+
+ def in_nested_transaction(self):
+ """Return True if a transaction is in progress."""
+ if self.__branch_from is not None:
+ return self.__branch_from.in_nested_transaction()
+
+ return (
+ self._nested_transaction is not None
+ and self._nested_transaction.is_active
+ )
+
+ def _is_autocommit_isolation(self):
+ opt_iso = self._execution_options.get("isolation_level", None)
+ return bool(
+ opt_iso == "AUTOCOMMIT"
+ or (
+ opt_iso is None
+ and getattr(self.engine.dialect, "isolation_level", None)
+ == "AUTOCOMMIT"
+ )
+ )
+
+ def get_transaction(self):
+ """Return the current root transaction in progress, if any.
+
+ .. versionadded:: 1.4
+
+ """
+
+ if self.__branch_from is not None:
+ return self.__branch_from.get_transaction()
+
+ return self._transaction
+
+ def get_nested_transaction(self):
+ """Return the current nested transaction in progress, if any.
+
+ .. versionadded:: 1.4
+
+ """
+ if self.__branch_from is not None:
+
+ return self.__branch_from.get_nested_transaction()
+
+ return self._nested_transaction
+
+ def _begin_impl(self, transaction):
+ assert not self.__branch_from
+
+ if self._echo:
+ if self._is_autocommit_isolation():
+ self._log_info(
+ "BEGIN (implicit; DBAPI should not BEGIN due to "
+ "autocommit mode)"
+ )
+ else:
+ self._log_info("BEGIN (implicit)")
+
+ self.__in_begin = True
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.begin(self)
+
+ try:
+ self.engine.dialect.do_begin(self.connection)
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ self.__in_begin = False
+
+ def _rollback_impl(self):
+ assert not self.__branch_from
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.rollback(self)
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ if self._echo:
+ if self._is_autocommit_isolation():
+ self._log_info(
+ "ROLLBACK using DBAPI connection.rollback(), "
+ "DBAPI should ignore due to autocommit mode"
+ )
+ else:
+ self._log_info("ROLLBACK")
+ try:
+ self.engine.dialect.do_rollback(self.connection)
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ def _commit_impl(self, autocommit=False):
+ assert not self.__branch_from
+
+ # AUTOCOMMIT isolation-level is a dialect-specific concept, however
+ # if a connection has this set as the isolation level, we can skip
+ # the "autocommit" warning as the operation will do "autocommit"
+ # in any case
+ if autocommit and not self._is_autocommit_isolation():
+ util.warn_deprecated_20(
+ "The current statement is being autocommitted using "
+ "implicit autocommit, which will be removed in "
+ "SQLAlchemy 2.0. "
+ "Use the .begin() method of Engine or Connection in order to "
+ "use an explicit transaction for DML and DDL statements."
+ )
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.commit(self)
+
+ if self._echo:
+ if self._is_autocommit_isolation():
+ self._log_info(
+ "COMMIT using DBAPI connection.commit(), "
+ "DBAPI should ignore due to autocommit mode"
+ )
+ else:
+ self._log_info("COMMIT")
+ try:
+ self.engine.dialect.do_commit(self.connection)
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ def _savepoint_impl(self, name=None):
+ assert not self.__branch_from
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.savepoint(self, name)
+
+ if name is None:
+ self.__savepoint_seq += 1
+ name = "sa_savepoint_%s" % self.__savepoint_seq
+ if self._still_open_and_dbapi_connection_is_valid:
+ self.engine.dialect.do_savepoint(self, name)
+ return name
+
+ def _rollback_to_savepoint_impl(self, name):
+ assert not self.__branch_from
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.rollback_savepoint(self, name, None)
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ self.engine.dialect.do_rollback_to_savepoint(self, name)
+
+ def _release_savepoint_impl(self, name):
+ assert not self.__branch_from
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.release_savepoint(self, name, None)
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ self.engine.dialect.do_release_savepoint(self, name)
+
+ def _begin_twophase_impl(self, transaction):
+ assert not self.__branch_from
+
+ if self._echo:
+ self._log_info("BEGIN TWOPHASE (implicit)")
+ if self._has_events or self.engine._has_events:
+ self.dispatch.begin_twophase(self, transaction.xid)
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ self.__in_begin = True
+ try:
+ self.engine.dialect.do_begin_twophase(self, transaction.xid)
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ self.__in_begin = False
+
+ def _prepare_twophase_impl(self, xid):
+ assert not self.__branch_from
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.prepare_twophase(self, xid)
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ assert isinstance(self._transaction, TwoPhaseTransaction)
+ try:
+ self.engine.dialect.do_prepare_twophase(self, xid)
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ def _rollback_twophase_impl(self, xid, is_prepared):
+ assert not self.__branch_from
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.rollback_twophase(self, xid, is_prepared)
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ assert isinstance(self._transaction, TwoPhaseTransaction)
+ try:
+ self.engine.dialect.do_rollback_twophase(
+ self, xid, is_prepared
+ )
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ def _commit_twophase_impl(self, xid, is_prepared):
+ assert not self.__branch_from
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.commit_twophase(self, xid, is_prepared)
+
+ if self._still_open_and_dbapi_connection_is_valid:
+ assert isinstance(self._transaction, TwoPhaseTransaction)
+ try:
+ self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ def _autorollback(self):
+ if self.__branch_from:
+ self.__branch_from._autorollback()
+
+ if not self.in_transaction():
+ self._rollback_impl()
+
+ def _warn_for_legacy_exec_format(self):
+ util.warn_deprecated_20(
+ "The connection.execute() method in "
+ "SQLAlchemy 2.0 will accept parameters as a single "
+ "dictionary or a "
+ "single sequence of dictionaries only. "
+ "Parameters passed as keyword arguments, tuples or positionally "
+ "oriented dictionaries and/or tuples "
+ "will no longer be accepted."
+ )
+
+ def close(self):
+ """Close this :class:`_engine.Connection`.
+
+ This results in a release of the underlying database
+ resources, that is, the DBAPI connection referenced
+ internally. The DBAPI connection is typically restored
+ back to the connection-holding :class:`_pool.Pool` referenced
+ by the :class:`_engine.Engine` that produced this
+ :class:`_engine.Connection`. Any transactional state present on
+ the DBAPI connection is also unconditionally released via
+ the DBAPI connection's ``rollback()`` method, regardless
+ of any :class:`.Transaction` object that may be
+ outstanding with regards to this :class:`_engine.Connection`.
+
+ After :meth:`_engine.Connection.close` is called, the
+ :class:`_engine.Connection` is permanently in a closed state,
+ and will allow no further operations.
+
+ """
+
+ if self.__branch_from:
+ assert not self._is_future
+ util.warn_deprecated_20(
+ "The .close() method on a so-called 'branched' connection is "
+ "deprecated as of 1.4, as are 'branched' connections overall, "
+ "and will be removed in a future release. If this is a "
+ "default-handling function, don't close the connection."
+ )
+ self._dbapi_connection = None
+ self.__can_reconnect = False
+ return
+
+ if self._transaction:
+ self._transaction.close()
+ skip_reset = True
+ else:
+ skip_reset = False
+
+ if self._dbapi_connection is not None:
+ conn = self._dbapi_connection
+
+ # as we just closed the transaction, close the connection
+ # pool connection without doing an additional reset
+ if skip_reset:
+ conn._close_no_reset()
+ else:
+ conn.close()
+
+ # There is a slight chance that conn.close() may have
+ # triggered an invalidation here in which case
+ # _dbapi_connection would already be None, however usually
+ # it will be non-None here and in a "closed" state.
+ self._dbapi_connection = None
+ self.__can_reconnect = False
+
+ def scalar(self, object_, *multiparams, **params):
+ """Executes and returns the first column of the first row.
+
+ The underlying result/cursor is closed after execution.
+
+ """
+
+ return self.execute(object_, *multiparams, **params).scalar()
+
+ def scalars(self, object_, *multiparams, **params):
+ """Executes and returns a scalar result set, which yields scalar values
+ from the first column of each row.
+
+ This method is equivalent to calling :meth:`_engine.Connection.execute`
+ to receive a :class:`_result.Result` object, then invoking the
+ :meth:`_result.Result.scalars` method to produce a
+ :class:`_result.ScalarResult` instance.
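+
+ E.g., a sketch; ``user_table`` is a hypothetical
+ :class:`_schema.Table`::
+
+ names = conn.scalars(select(user_table.c.name)).all()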
+
+ :return: a :class:`_result.ScalarResult`
+
+ .. versionadded:: 1.4.24
+
+ """
+
+ return self.execute(object_, *multiparams, **params).scalars()
+
+ def execute(self, statement, *multiparams, **params):
+ r"""Executes a SQL statement construct and returns a
+ :class:`_engine.CursorResult`.
+
+ :param statement: The statement to be executed. May be
+ one of:
+
+ * a plain string (deprecated)
+ * any :class:`_expression.ClauseElement` construct that is also
+ a subclass of :class:`.Executable`, such as a
+ :func:`_expression.select` construct
+ * a :class:`.FunctionElement`, such as that generated
+ by :data:`.func`, will be automatically wrapped in
+ a SELECT statement, which is then executed.
+ * a :class:`.DDLElement` object
+ * a :class:`.DefaultGenerator` object
+ * a :class:`.Compiled` object
+
+ .. deprecated:: 2.0 passing a string to
+ :meth:`_engine.Connection.execute` is
+ deprecated and will be removed in version 2.0. Use the
+ :func:`_expression.text` construct with
+ :meth:`_engine.Connection.execute`, or the
+ :meth:`_engine.Connection.exec_driver_sql`
+ method to invoke a driver-level
+ SQL string.
+
+ :param \*multiparams/\**params: represent bound parameter
+ values to be used in the execution. Typically,
+ the format is either a collection of one or more
+ dictionaries passed to \*multiparams::
+
+ conn.execute(
+ table.insert(),
+ {"id":1, "value":"v1"},
+ {"id":2, "value":"v2"}
+ )
+
+ ...or individual key/values interpreted by \**params::
+
+ conn.execute(
+ table.insert(), id=1, value="v1"
+ )
+
+ In the case that a plain SQL string is passed, and the underlying
+ DBAPI accepts positional bind parameters, a collection of tuples
+ or individual values in \*multiparams may be passed::
+
+ conn.execute(
+ "INSERT INTO table (id, value) VALUES (?, ?)",
+ (1, "v1"), (2, "v2")
+ )
+
+ conn.execute(
+ "INSERT INTO table (id, value) VALUES (?, ?)",
+ 1, "v1"
+ )
+
+ Note above, the usage of a question mark "?" or other
+ symbol is contingent upon the "paramstyle" accepted by the DBAPI
+ in use, which may be any of "qmark", "named", "pyformat", "format",
+ "numeric". See `pep-249
+ <https://www.python.org/dev/peps/pep-0249/>`_ for details on
+ paramstyle.
+
+ To execute a textual SQL statement which uses bound parameters in a
+ DBAPI-agnostic way, use the :func:`_expression.text` construct.
+
+ .. deprecated:: 2.0 use of tuple or scalar positional parameters
+ is deprecated. All params should be dicts or sequences of dicts.
+ Use :meth:`.exec_driver_sql` to execute a plain string with
+ tuple or scalar positional parameters.
+
+ """
+
+ if isinstance(statement, util.string_types):
+ util.warn_deprecated_20(
+ "Passing a string to Connection.execute() is "
+ "deprecated and will be removed in version 2.0. Use the "
+ "text() construct, "
+ "or the Connection.exec_driver_sql() method to invoke a "
+ "driver-level SQL string."
+ )
+
+ return self._exec_driver_sql(
+ statement,
+ multiparams,
+ params,
+ _EMPTY_EXECUTION_OPTS,
+ future=False,
+ )
+
+ try:
+ meth = statement._execute_on_connection
+ except AttributeError as err:
+ util.raise_(
+ exc.ObjectNotExecutableError(statement), replace_context=err
+ )
+ else:
+ return meth(self, multiparams, params, _EMPTY_EXECUTION_OPTS)
+
+ def _execute_function(self, func, multiparams, params, execution_options):
+ """Execute a sql.FunctionElement object."""
+
+ return self._execute_clauseelement(
+ func.select(), multiparams, params, execution_options
+ )
+
+ def _execute_default(
+ self,
+ default,
+ multiparams,
+ params,
+ # migrate is calling this directly :(
+ execution_options=_EMPTY_EXECUTION_OPTS,
+ ):
+ """Execute a schema.ColumnDefault object."""
+
+ execution_options = self._execution_options.merge_with(
+ execution_options
+ )
+
+ distilled_parameters = _distill_params(self, multiparams, params)
+
+ if self._has_events or self.engine._has_events:
+ (
+ default,
+ distilled_params,
+ event_multiparams,
+ event_params,
+ ) = self._invoke_before_exec_event(
+ default, distilled_parameters, execution_options
+ )
+
+ try:
+ conn = self._dbapi_connection
+ if conn is None:
+ conn = self._revalidate_connection()
+
+ dialect = self.dialect
+ ctx = dialect.execution_ctx_cls._init_default(
+ dialect, self, conn, execution_options
+ )
+ except (exc.PendingRollbackError, exc.ResourceClosedError):
+ raise
+ except BaseException as e:
+ self._handle_dbapi_exception(e, None, None, None, None)
+
+ ret = ctx._exec_default(None, default, None)
+ if self.should_close_with_result:
+ self.close()
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.after_execute(
+ self,
+ default,
+ event_multiparams,
+ event_params,
+ execution_options,
+ ret,
+ )
+
+ return ret
+
+ def _execute_ddl(self, ddl, multiparams, params, execution_options):
+ """Execute a schema.DDL object."""
+
+ execution_options = ddl._execution_options.merge_with(
+ self._execution_options, execution_options
+ )
+
+ distilled_parameters = _distill_params(self, multiparams, params)
+
+ if self._has_events or self.engine._has_events:
+ (
+ ddl,
+ distilled_params,
+ event_multiparams,
+ event_params,
+ ) = self._invoke_before_exec_event(
+ ddl, distilled_parameters, execution_options
+ )
+
+ exec_opts = self._execution_options.merge_with(execution_options)
+ schema_translate_map = exec_opts.get("schema_translate_map", None)
+
+ dialect = self.dialect
+
+ compiled = ddl.compile(
+ dialect=dialect, schema_translate_map=schema_translate_map
+ )
+ ret = self._execute_context(
+ dialect,
+ dialect.execution_ctx_cls._init_ddl,
+ compiled,
+ None,
+ execution_options,
+ compiled,
+ )
+ if self._has_events or self.engine._has_events:
+ self.dispatch.after_execute(
+ self,
+ ddl,
+ event_multiparams,
+ event_params,
+ execution_options,
+ ret,
+ )
+ return ret
+
+ def _invoke_before_exec_event(
+ self, elem, distilled_params, execution_options
+ ):
+
+ if len(distilled_params) == 1:
+ event_multiparams, event_params = [], distilled_params[0]
+ else:
+ event_multiparams, event_params = distilled_params, {}
+
+ for fn in self.dispatch.before_execute:
+ elem, event_multiparams, event_params = fn(
+ self,
+ elem,
+ event_multiparams,
+ event_params,
+ execution_options,
+ )
+
+ if event_multiparams:
+ distilled_params = list(event_multiparams)
+ if event_params:
+ raise exc.InvalidRequestError(
+ "Event handler can't return non-empty multiparams "
+ "and params at the same time"
+ )
+ elif event_params:
+ distilled_params = [event_params]
+ else:
+ distilled_params = []
+
+ return elem, distilled_params, event_multiparams, event_params
+
+ def _execute_clauseelement(
+ self, elem, multiparams, params, execution_options
+ ):
+ """Execute a sql.ClauseElement object."""
+
+ execution_options = elem._execution_options.merge_with(
+ self._execution_options, execution_options
+ )
+
+ distilled_params = _distill_params(self, multiparams, params)
+
+ has_events = self._has_events or self.engine._has_events
+ if has_events:
+ (
+ elem,
+ distilled_params,
+ event_multiparams,
+ event_params,
+ ) = self._invoke_before_exec_event(
+ elem, distilled_params, execution_options
+ )
+
+ if distilled_params:
+ # ensure we don't retain a link to the view object for keys()
+ # which links to the values, which we don't want to cache
+ keys = sorted(distilled_params[0])
+ for_executemany = len(distilled_params) > 1
+ else:
+ keys = []
+ for_executemany = False
+
+ dialect = self.dialect
+
+ schema_translate_map = execution_options.get(
+ "schema_translate_map", None
+ )
+
+ compiled_cache = execution_options.get(
+ "compiled_cache", self.engine._compiled_cache
+ )
+
+ compiled_sql, extracted_params, cache_hit = elem._compile_w_cache(
+ dialect=dialect,
+ compiled_cache=compiled_cache,
+ column_keys=keys,
+ for_executemany=for_executemany,
+ schema_translate_map=schema_translate_map,
+ linting=self.dialect.compiler_linting | compiler.WARN_LINTING,
+ )
+ ret = self._execute_context(
+ dialect,
+ dialect.execution_ctx_cls._init_compiled,
+ compiled_sql,
+ distilled_params,
+ execution_options,
+ compiled_sql,
+ distilled_params,
+ elem,
+ extracted_params,
+ cache_hit=cache_hit,
+ )
+ if has_events:
+ self.dispatch.after_execute(
+ self,
+ elem,
+ event_multiparams,
+ event_params,
+ execution_options,
+ ret,
+ )
+ return ret
+
+ def _execute_compiled(
+ self,
+ compiled,
+ multiparams,
+ params,
+ execution_options=_EMPTY_EXECUTION_OPTS,
+ ):
+ """Execute a sql.Compiled object.
+
+ TODO: why do we have this? likely deprecate or remove
+
+ """
+
+ execution_options = compiled.execution_options.merge_with(
+ self._execution_options, execution_options
+ )
+ distilled_parameters = _distill_params(self, multiparams, params)
+
+ if self._has_events or self.engine._has_events:
+ (
+ compiled,
+ distilled_params,
+ event_multiparams,
+ event_params,
+ ) = self._invoke_before_exec_event(
+ compiled, distilled_parameters, execution_options
+ )
+
+ dialect = self.dialect
+
+ ret = self._execute_context(
+ dialect,
+ dialect.execution_ctx_cls._init_compiled,
+ compiled,
+ distilled_parameters,
+ execution_options,
+ compiled,
+ distilled_parameters,
+ None,
+ None,
+ )
+ if self._has_events or self.engine._has_events:
+ self.dispatch.after_execute(
+ self,
+ compiled,
+ event_multiparams,
+ event_params,
+ execution_options,
+ ret,
+ )
+ return ret
+
+ def _exec_driver_sql(
+ self, statement, multiparams, params, execution_options, future
+ ):
+
+ execution_options = self._execution_options.merge_with(
+ execution_options
+ )
+
+ distilled_parameters = _distill_params(self, multiparams, params)
+
+ if not future:
+ if self._has_events or self.engine._has_events:
+ (
+ statement,
+ distilled_params,
+ event_multiparams,
+ event_params,
+ ) = self._invoke_before_exec_event(
+ statement, distilled_parameters, execution_options
+ )
+
+ dialect = self.dialect
+ ret = self._execute_context(
+ dialect,
+ dialect.execution_ctx_cls._init_statement,
+ statement,
+ distilled_parameters,
+ execution_options,
+ statement,
+ distilled_parameters,
+ )
+
+ if not future:
+ if self._has_events or self.engine._has_events:
+ self.dispatch.after_execute(
+ self,
+ statement,
+ event_multiparams,
+ event_params,
+ execution_options,
+ ret,
+ )
+ return ret
+
+ def _execute_20(
+ self,
+ statement,
+ parameters=None,
+ execution_options=_EMPTY_EXECUTION_OPTS,
+ ):
+ args_10style, kwargs_10style = _distill_params_20(parameters)
+ try:
+ meth = statement._execute_on_connection
+ except AttributeError as err:
+ util.raise_(
+ exc.ObjectNotExecutableError(statement), replace_context=err
+ )
+ else:
+ return meth(self, args_10style, kwargs_10style, execution_options)
+
+ def exec_driver_sql(
+ self, statement, parameters=None, execution_options=None
+ ):
+ r"""Executes a SQL statement construct and returns a
+ :class:`_engine.CursorResult`.
+
+ :param statement: The statement str to be executed. Bound parameters
+ must use the underlying DBAPI's paramstyle, such as "qmark",
+ "pyformat", "format", etc.
+
+ :param parameters: represent bound parameter values to be used in the
+ execution. The format is one of: a dictionary of named parameters,
+ a tuple of positional parameters, or a list containing either
+ dictionaries or tuples for multiple-execute support.
+
+ E.g. multiple dictionaries::
+
+
+ conn.exec_driver_sql(
+ "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
+ [{"id":1, "value":"v1"}, {"id":2, "value":"v2"}]
+ )
+
+ Single dictionary::
+
+ conn.exec_driver_sql(
+ "INSERT INTO table (id, value) VALUES (%(id)s, %(value)s)",
+ dict(id=1, value="v1")
+ )
+
+ Single tuple::
+
+ conn.exec_driver_sql(
+ "INSERT INTO table (id, value) VALUES (?, ?)",
+ (1, 'v1')
+ )
+
+ .. note:: The :meth:`_engine.Connection.exec_driver_sql` method does
+ not participate in the
+ :meth:`_events.ConnectionEvents.before_execute` and
+ :meth:`_events.ConnectionEvents.after_execute` events. To
+ intercept calls to :meth:`_engine.Connection.exec_driver_sql`, use
+ :meth:`_events.ConnectionEvents.before_cursor_execute` and
+ :meth:`_events.ConnectionEvents.after_cursor_execute`.
+
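+ For example, a sketch of a listener that logs each driver-level
+ statement, assuming ``engine`` refers to the :class:`_engine.Engine`
+ in use (the listener name is illustrative)::
+
+     from sqlalchemy import event
+
+     @event.listens_for(engine, "before_cursor_execute")
+     def log_driver_sql(
+         conn, cursor, statement, parameters, context, executemany
+     ):
+         print("driver-level SQL: %s" % statement)
+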
+ .. seealso::
+
+ :pep:`249`
+
+ """
+
+ args_10style, kwargs_10style = _distill_params_20(parameters)
+
+ return self._exec_driver_sql(
+ statement,
+ args_10style,
+ kwargs_10style,
+ execution_options,
+ future=True,
+ )
+
+ def _execute_context(
+ self,
+ dialect,
+ constructor,
+ statement,
+ parameters,
+ execution_options,
+ *args,
+ **kw
+ ):
+ """Create an :class:`.ExecutionContext` and execute, returning
+ a :class:`_engine.CursorResult`."""
+
+ branched = self
+ if self.__branch_from:
+ # if this is a "branched" connection, do everything in terms
+ # of the "root" connection, *except* for .close(), which is
+ # the only feature that branching provides
+ self = self.__branch_from
+
+ if execution_options:
+ yp = execution_options.get("yield_per", None)
+ if yp:
+ execution_options = execution_options.union(
+ {"stream_results": True, "max_row_buffer": yp}
+ )
+
+ try:
+ conn = self._dbapi_connection
+ if conn is None:
+ conn = self._revalidate_connection()
+
+ context = constructor(
+ dialect, self, conn, execution_options, *args, **kw
+ )
+ except (exc.PendingRollbackError, exc.ResourceClosedError):
+ raise
+ except BaseException as e:
+ self._handle_dbapi_exception(
+ e, util.text_type(statement), parameters, None, None
+ )
+
+ if (
+ self._transaction
+ and not self._transaction.is_active
+ or (
+ self._nested_transaction
+ and not self._nested_transaction.is_active
+ )
+ ):
+ self._invalid_transaction()
+
+ elif self._trans_context_manager:
+ TransactionalContext._trans_ctx_check(self)
+
+ if self._is_future and self._transaction is None:
+ self._autobegin()
+
+ context.pre_exec()
+
+ if dialect.use_setinputsizes:
+ context._set_input_sizes()
+
+ cursor, statement, parameters = (
+ context.cursor,
+ context.statement,
+ context.parameters,
+ )
+
+ if not context.executemany:
+ parameters = parameters[0]
+
+ if self._has_events or self.engine._has_events:
+ for fn in self.dispatch.before_cursor_execute:
+ statement, parameters = fn(
+ self,
+ cursor,
+ statement,
+ parameters,
+ context,
+ context.executemany,
+ )
+
+ if self._echo:
+ self._log_info(statement)
+
+ stats = context._get_cache_stats()
+
+ if not self.engine.hide_parameters:
+ self._log_info(
+ "[%s] %r",
+ stats,
+ sql_util._repr_params(
+ parameters, batches=10, ismulti=context.executemany
+ ),
+ )
+ else:
+ self._log_info(
+ "[%s] [SQL parameters hidden due to hide_parameters=True]"
+ % (stats,)
+ )
+
+ evt_handled = False
+ try:
+ if context.executemany:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_executemany:
+ if fn(cursor, statement, parameters, context):
+ evt_handled = True
+ break
+ if not evt_handled:
+ self.dialect.do_executemany(
+ cursor, statement, parameters, context
+ )
+ elif not parameters and context.no_parameters:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_execute_no_params:
+ if fn(cursor, statement, context):
+ evt_handled = True
+ break
+ if not evt_handled:
+ self.dialect.do_execute_no_params(
+ cursor, statement, context
+ )
+ else:
+ if self.dialect._has_events:
+ for fn in self.dialect.dispatch.do_execute:
+ if fn(cursor, statement, parameters, context):
+ evt_handled = True
+ break
+ if not evt_handled:
+ self.dialect.do_execute(
+ cursor, statement, parameters, context
+ )
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.after_cursor_execute(
+ self,
+ cursor,
+ statement,
+ parameters,
+ context,
+ context.executemany,
+ )
+
+ context.post_exec()
+
+ result = context._setup_result_proxy()
+
+ if not self._is_future:
+ should_close_with_result = branched.should_close_with_result
+
+ if not result._soft_closed and should_close_with_result:
+ result._autoclose_connection = True
+
+ if (
+ # usually we're in a transaction so avoid relatively
+ # expensive / legacy should_autocommit call
+ self._transaction is None
+ and context.should_autocommit
+ ):
+ self._commit_impl(autocommit=True)
+
+ # for "connectionless" execution, we have to close this
+ # Connection after the statement is complete.
+ # legacy stuff.
+ if should_close_with_result and context._soft_closed:
+ assert not self._is_future
+
+ # CursorResult already exhausted rows / has no rows.
+ # close us now
+ branched.close()
+
+ except BaseException as e:
+ self._handle_dbapi_exception(
+ e, statement, parameters, cursor, context
+ )
+
+ return result
+
+ def _cursor_execute(self, cursor, statement, parameters, context=None):
+ """Execute a statement + params on the given cursor.
+
+ Adds appropriate logging and exception handling.
+
+ This method is used by DefaultDialect for special-case
+ executions, such as for sequences and column defaults.
+ The path of statement execution in the majority of cases
+ terminates at _execute_context().
+
+ """
+ if self._has_events or self.engine._has_events:
+ for fn in self.dispatch.before_cursor_execute:
+ statement, parameters = fn(
+ self, cursor, statement, parameters, context, False
+ )
+
+ if self._echo:
+ self._log_info(statement)
+ self._log_info("[raw sql] %r", parameters)
+ try:
+ for fn in (
+ ()
+ if not self.dialect._has_events
+ else self.dialect.dispatch.do_execute
+ ):
+ if fn(cursor, statement, parameters, context):
+ break
+ else:
+ self.dialect.do_execute(cursor, statement, parameters, context)
+ except BaseException as e:
+ self._handle_dbapi_exception(
+ e, statement, parameters, cursor, context
+ )
+
+ if self._has_events or self.engine._has_events:
+ self.dispatch.after_cursor_execute(
+ self, cursor, statement, parameters, context, False
+ )
+
+ def _safe_close_cursor(self, cursor):
+ """Close the given cursor, catching exceptions
+ and turning into log warnings.
+
+ """
+ try:
+ cursor.close()
+ except Exception:
+ # log the error through the connection pool's logger.
+ self.engine.pool.logger.error(
+ "Error closing cursor", exc_info=True
+ )
+
+ _reentrant_error = False
+ _is_disconnect = False
+
+ def _handle_dbapi_exception(
+ self, e, statement, parameters, cursor, context
+ ):
+ exc_info = sys.exc_info()
+
+ is_exit_exception = util.is_exit_exception(e)
+
+ if not self._is_disconnect:
+ self._is_disconnect = (
+ isinstance(e, self.dialect.dbapi.Error)
+ and not self.closed
+ and self.dialect.is_disconnect(
+ e,
+ self._dbapi_connection if not self.invalidated else None,
+ cursor,
+ )
+ ) or (is_exit_exception and not self.closed)
+
+ invalidate_pool_on_disconnect = not is_exit_exception
+
+ if self._reentrant_error:
+ util.raise_(
+ exc.DBAPIError.instance(
+ statement,
+ parameters,
+ e,
+ self.dialect.dbapi.Error,
+ hide_parameters=self.engine.hide_parameters,
+ dialect=self.dialect,
+ ismulti=context.executemany
+ if context is not None
+ else None,
+ ),
+ with_traceback=exc_info[2],
+ from_=e,
+ )
+ self._reentrant_error = True
+ try:
+ # non-DBAPI error - if we already got a context,
+ # or there's no string statement, don't wrap it
+ should_wrap = isinstance(e, self.dialect.dbapi.Error) or (
+ statement is not None
+ and context is None
+ and not is_exit_exception
+ )
+
+ if should_wrap:
+ sqlalchemy_exception = exc.DBAPIError.instance(
+ statement,
+ parameters,
+ e,
+ self.dialect.dbapi.Error,
+ hide_parameters=self.engine.hide_parameters,
+ connection_invalidated=self._is_disconnect,
+ dialect=self.dialect,
+ ismulti=context.executemany
+ if context is not None
+ else None,
+ )
+ else:
+ sqlalchemy_exception = None
+
+ newraise = None
+
+ if (
+ self._has_events or self.engine._has_events
+ ) and not self._execution_options.get(
+ "skip_user_error_events", False
+ ):
+ ctx = ExceptionContextImpl(
+ e,
+ sqlalchemy_exception,
+ self.engine,
+ self,
+ cursor,
+ statement,
+ parameters,
+ context,
+ self._is_disconnect,
+ invalidate_pool_on_disconnect,
+ )
+
+ for fn in self.dispatch.handle_error:
+ try:
+ # handler returns an exception;
+ # call next handler in a chain
+ per_fn = fn(ctx)
+ if per_fn is not None:
+ ctx.chained_exception = newraise = per_fn
+ except Exception as _raised:
+ # handler raises an exception - stop processing
+ newraise = _raised
+ break
+
+ if self._is_disconnect != ctx.is_disconnect:
+ self._is_disconnect = ctx.is_disconnect
+ if sqlalchemy_exception:
+ sqlalchemy_exception.connection_invalidated = (
+ ctx.is_disconnect
+ )
+
+ # set up potentially user-defined value for
+ # invalidate pool.
+ invalidate_pool_on_disconnect = (
+ ctx.invalidate_pool_on_disconnect
+ )
+
+ if should_wrap and context:
+ context.handle_dbapi_exception(e)
+
+ if not self._is_disconnect:
+ if cursor:
+ self._safe_close_cursor(cursor)
+ with util.safe_reraise(warn_only=True):
+ self._autorollback()
+
+ if newraise:
+ util.raise_(newraise, with_traceback=exc_info[2], from_=e)
+ elif should_wrap:
+ util.raise_(
+ sqlalchemy_exception, with_traceback=exc_info[2], from_=e
+ )
+ else:
+ util.raise_(exc_info[1], with_traceback=exc_info[2])
+
+ finally:
+ del self._reentrant_error
+ if self._is_disconnect:
+ del self._is_disconnect
+ if not self.invalidated:
+ dbapi_conn_wrapper = self._dbapi_connection
+ if invalidate_pool_on_disconnect:
+ self.engine.pool._invalidate(dbapi_conn_wrapper, e)
+ self.invalidate(e)
+ if self.should_close_with_result:
+ assert not self._is_future
+ self.close()
+
+ @classmethod
+ def _handle_dbapi_exception_noconnection(cls, e, dialect, engine):
+ exc_info = sys.exc_info()
+
+ is_disconnect = dialect.is_disconnect(e, None, None)
+
+ should_wrap = isinstance(e, dialect.dbapi.Error)
+
+ if should_wrap:
+ sqlalchemy_exception = exc.DBAPIError.instance(
+ None,
+ None,
+ e,
+ dialect.dbapi.Error,
+ hide_parameters=engine.hide_parameters,
+ connection_invalidated=is_disconnect,
+ )
+ else:
+ sqlalchemy_exception = None
+
+ newraise = None
+
+ if engine._has_events:
+ ctx = ExceptionContextImpl(
+ e,
+ sqlalchemy_exception,
+ engine,
+ None,
+ None,
+ None,
+ None,
+ None,
+ is_disconnect,
+ True,
+ )
+ for fn in engine.dispatch.handle_error:
+ try:
+ # handler returns an exception;
+ # call next handler in a chain
+ per_fn = fn(ctx)
+ if per_fn is not None:
+ ctx.chained_exception = newraise = per_fn
+ except Exception as _raised:
+ # handler raises an exception - stop processing
+ newraise = _raised
+ break
+
+ if sqlalchemy_exception and is_disconnect != ctx.is_disconnect:
+ sqlalchemy_exception.connection_invalidated = (
+ is_disconnect
+ ) = ctx.is_disconnect
+
+ if newraise:
+ util.raise_(newraise, with_traceback=exc_info[2], from_=e)
+ elif should_wrap:
+ util.raise_(
+ sqlalchemy_exception, with_traceback=exc_info[2], from_=e
+ )
+ else:
+ util.raise_(exc_info[1], with_traceback=exc_info[2])
+
+ def _run_ddl_visitor(self, visitorcallable, element, **kwargs):
+ """run a DDL visitor.
+
+ This method is only here so that the MockConnection can change the
+ options given to the visitor so that "checkfirst" is skipped.
+
+ """
+ visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_engine.Connection.transaction` "
+ "method is deprecated and will be "
+ "removed in a future release. Use the :meth:`_engine.Engine.begin` "
+ "context manager instead.",
+ )
+ def transaction(self, callable_, *args, **kwargs):
+ r"""Execute the given function within a transaction boundary.
+
+ The function is passed this :class:`_engine.Connection`
+ as the first argument, followed by the given \*args and \**kwargs,
+ e.g.::
+
+ def do_something(conn, x, y):
+ conn.execute(text("some statement"), {'x':x, 'y':y})
+
+ conn.transaction(do_something, 5, 10)
+
+ The operations inside the function are all invoked within the
+ context of a single :class:`.Transaction`.
+ Upon success, the transaction is committed. If an
+ exception is raised, the transaction is rolled back
+ before propagating the exception.
+
+ .. note::
+
+ The :meth:`.transaction` method is superseded by
+ the usage of the Python ``with:`` statement, which can
+ be used with :meth:`_engine.Connection.begin`::
+
+ with conn.begin():
+ conn.execute(text("some statement"), {'x':5, 'y':10})
+
+ As well as with :meth:`_engine.Engine.begin`::
+
+ with engine.begin() as conn:
+ conn.execute(text("some statement"), {'x':5, 'y':10})
+
+ .. seealso::
+
+ :meth:`_engine.Engine.begin` - engine-level transactional
+ context
+
+ :meth:`_engine.Engine.transaction` - engine-level version of
+ :meth:`_engine.Connection.transaction`
+
+ """
+
+ kwargs["_sa_skip_warning"] = True
+ trans = self.begin()
+ try:
+ ret = self.run_callable(callable_, *args, **kwargs)
+ trans.commit()
+ return ret
+ except:
+ with util.safe_reraise():
+ trans.rollback()
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_engine.Connection.run_callable` "
+ "method is deprecated and will "
+ "be removed in a future release. Invoke the callable function "
+ "directly, passing the Connection.",
+ )
+ def run_callable(self, callable_, *args, **kwargs):
+ r"""Given a callable object or function, execute it, passing
+ a :class:`_engine.Connection` as the first argument.
+
+ The given \*args and \**kwargs are passed subsequent
+ to the :class:`_engine.Connection` argument.
+
+ This function, along with :meth:`_engine.Engine.run_callable`,
+ allows a function to be run with a :class:`_engine.Connection`
+ or :class:`_engine.Engine` object without the need to know
+ which one is being dealt with.
+
+ """
+ return callable_(self, *args, **kwargs)
+
+
+class ExceptionContextImpl(ExceptionContext):
+ """Implement the :class:`.ExceptionContext` interface."""
+
+ def __init__(
+ self,
+ exception,
+ sqlalchemy_exception,
+ engine,
+ connection,
+ cursor,
+ statement,
+ parameters,
+ context,
+ is_disconnect,
+ invalidate_pool_on_disconnect,
+ ):
+ self.engine = engine
+ self.connection = connection
+ self.sqlalchemy_exception = sqlalchemy_exception
+ self.original_exception = exception
+ self.execution_context = context
+ self.statement = statement
+ self.parameters = parameters
+ self.is_disconnect = is_disconnect
+ self.invalidate_pool_on_disconnect = invalidate_pool_on_disconnect
+
+
+class Transaction(TransactionalContext):
+ """Represent a database transaction in progress.
+
+ The :class:`.Transaction` object is procured by
+ calling the :meth:`_engine.Connection.begin` method of
+ :class:`_engine.Connection`::
+
+ from sqlalchemy import create_engine
+ engine = create_engine("postgresql://scott:tiger@localhost/test")
+ connection = engine.connect()
+ trans = connection.begin()
+ connection.execute(text("insert into x (a, b) values (1, 2)"))
+ trans.commit()
+
+ The object provides :meth:`.rollback` and :meth:`.commit`
+ methods in order to control transaction boundaries. It
+ also implements a context manager interface so that
+ the Python ``with`` statement can be used with the
+ :meth:`_engine.Connection.begin` method::
+
+ with connection.begin():
+ connection.execute(text("insert into x (a, b) values (1, 2)"))
+
+ The Transaction object is **not** threadsafe.
+
+ .. seealso::
+
+ :meth:`_engine.Connection.begin`
+
+ :meth:`_engine.Connection.begin_twophase`
+
+ :meth:`_engine.Connection.begin_nested`
+
+ .. index::
+ single: thread safety; Transaction
+ """
+
+ __slots__ = ()
+
+ _is_root = False
+
+ def __init__(self, connection):
+ raise NotImplementedError()
+
+ def _do_deactivate(self):
+ """do whatever steps are necessary to set this transaction as
+ "deactive", however leave this transaction object in place as far
+ as the connection's state is concerned.
+
+ for a "real" transaction this should roll back the transaction
+ and ensure this transaction is no longer a reset agent.
+
+ this is used for nesting of marker transactions where the marker
+ can set the "real" transaction as rolled back, however it stays
+ in place.
+
+ for 2.0 we hope to remove this nesting feature.
+
+ """
+ raise NotImplementedError()
+
+ @property
+ def _deactivated_from_connection(self):
+ """True if this transaction is totally deactivated from the connection
+ and therefore can no longer affect its state.
+
+ """
+ raise NotImplementedError()
+
+ def _do_close(self):
+ raise NotImplementedError()
+
+ def _do_rollback(self):
+ raise NotImplementedError()
+
+ def _do_commit(self):
+ raise NotImplementedError()
+
+ @property
+ def is_valid(self):
+ return self.is_active and not self.connection.invalidated
+
+ def close(self):
+ """Close this :class:`.Transaction`.
+
+ If this transaction is the base transaction in a begin/commit
+ nesting, the transaction will be rolled back. Otherwise, the
+ method returns without taking action.
+
+ This is used to cancel a Transaction without affecting the scope of
+ an enclosing transaction.
+
+ """
+ try:
+ self._do_close()
+ finally:
+ assert not self.is_active
+
+ def rollback(self):
+ """Roll back this :class:`.Transaction`.
+
+ The implementation of this may vary based on the type of transaction in
+ use:
+
+ * For a simple database transaction (e.g. :class:`.RootTransaction`),
+ it corresponds to a ROLLBACK.
+
+ * For a :class:`.NestedTransaction`, it corresponds to a
+ "ROLLBACK TO SAVEPOINT" operation.
+
+ * For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two
+ phase transactions may be used.
+
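+ E.g., a typical usage sketch with a :class:`.RootTransaction`::
+
+     trans = connection.begin()
+     try:
+         connection.execute(text("insert into x (a, b) values (1, 2)"))
+     except:
+         trans.rollback()
+         raise
+     else:
+         trans.commit()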
+
+ """
+ try:
+ self._do_rollback()
+ finally:
+ assert not self.is_active
+
+ def commit(self):
+ """Commit this :class:`.Transaction`.
+
+ The implementation of this may vary based on the type of transaction in
+ use:
+
+ * For a simple database transaction (e.g. :class:`.RootTransaction`),
+ it corresponds to a COMMIT.
+
+ * For a :class:`.NestedTransaction`, it corresponds to a
+ "RELEASE SAVEPOINT" operation.
+
+ * For a :class:`.TwoPhaseTransaction`, DBAPI-specific methods for two
+ phase transactions may be used.
+
+ """
+ try:
+ self._do_commit()
+ finally:
+ assert not self.is_active
+
+ def _get_subject(self):
+ return self.connection
+
+ def _transaction_is_active(self):
+ return self.is_active
+
+ def _transaction_is_closed(self):
+ return not self._deactivated_from_connection
+
+ def _rollback_can_be_called(self):
+ # for RootTransaction / NestedTransaction, it's safe to call
+ # rollback() even if the transaction is deactive and no warnings
+ # will be emitted. tested in
+ # test_transaction.py -> test_no_rollback_in_deactive(?:_savepoint)?
+ return True
+
+
+class MarkerTransaction(Transaction):
+ """A 'marker' transaction that is used for nested begin() calls.
+
+ .. deprecated:: 1.4 The "future" :class:`_engine.Connection` of SQLAlchemy 2.0 will not support this pattern.
+
+ """
+
+ __slots__ = ("connection", "_is_active", "_transaction")
+
+ def __init__(self, connection):
+ assert connection._transaction is not None
+ if not connection._transaction.is_active:
+ raise exc.InvalidRequestError(
+ "the current transaction on this connection is inactive. "
+ "Please issue a rollback first."
+ )
+
+ assert not connection._is_future
+ util.warn_deprecated_20(
+ "Calling .begin() when a transaction is already begun, creating "
+ "a 'sub' transaction, is deprecated "
+ "and will be removed in 2.0. See the documentation section "
+ "'Migrating from the nesting pattern' for background on how "
+ "to migrate from this pattern."
+ )
+
+ self.connection = connection
+
+ if connection._trans_context_manager:
+ TransactionalContext._trans_ctx_check(connection)
+
+ if connection._nested_transaction is not None:
+ self._transaction = connection._nested_transaction
+ else:
+ self._transaction = connection._transaction
+ self._is_active = True
+
+ @property
+ def _deactivated_from_connection(self):
+ return not self.is_active
+
+ @property
+ def is_active(self):
+ return self._is_active and self._transaction.is_active
+
+ def _deactivate(self):
+ self._is_active = False
+
+ def _do_close(self):
+ # does not actually roll back the root
+ self._deactivate()
+
+ def _do_rollback(self):
+ # does roll back the root
+ if self._is_active:
+ try:
+ self._transaction._do_deactivate()
+ finally:
+ self._deactivate()
+
+ def _do_commit(self):
+ self._deactivate()
+
+
+class RootTransaction(Transaction):
+ """Represent the "root" transaction on a :class:`_engine.Connection`.
+
+ This corresponds to the current "BEGIN/COMMIT/ROLLBACK" that's occurring
+ for the :class:`_engine.Connection`. The :class:`_engine.RootTransaction`
+ is created by calling upon the :meth:`_engine.Connection.begin` method, and
+ remains associated with the :class:`_engine.Connection` throughout its
+ active span. The current :class:`_engine.RootTransaction` in use is
+ accessible via the :meth:`_engine.Connection.get_transaction` method of
+ :class:`_engine.Connection`.
+
+ In :term:`2.0 style` use, the :class:`_future.Connection` also employs
+ "autobegin" behavior that will create a new
+ :class:`_engine.RootTransaction` whenever a connection in a
+ non-transactional state is used to emit commands on the DBAPI connection.
+ The scope of the :class:`_engine.RootTransaction` in 2.0 style
+ use can be controlled using the :meth:`_future.Connection.commit` and
+ :meth:`_future.Connection.rollback` methods.
+
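+ E.g., a brief sketch of explicit 1.x-style use::
+
+     with engine.connect() as connection:
+         trans = connection.begin()
+         assert connection.get_transaction() is trans
+         connection.execute(text("insert into t (x) values (1)"))
+         trans.commit()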
+
+ """
+
+ _is_root = True
+
+ __slots__ = ("connection", "is_active")
+
+ def __init__(self, connection):
+ assert connection._transaction is None
+ if connection._trans_context_manager:
+ TransactionalContext._trans_ctx_check(connection)
+ self.connection = connection
+ self._connection_begin_impl()
+ connection._transaction = self
+
+ self.is_active = True
+
+ def _deactivate_from_connection(self):
+ if self.is_active:
+ assert self.connection._transaction is self
+ self.is_active = False
+
+ elif self.connection._transaction is not self:
+ util.warn("transaction already deassociated from connection")
+
+ @property
+ def _deactivated_from_connection(self):
+ return self.connection._transaction is not self
+
+ def _do_deactivate(self):
+ # called from a MarkerTransaction to cancel this root transaction.
+ # the transaction stays in place as connection._transaction, but
+ # is no longer active and is no longer the reset agent for the
+ # pooled connection. the connection won't support a new begin()
+ # until this transaction is explicitly closed, rolled back,
+ # or committed.
+
+ assert self.connection._transaction is self
+
+ if self.is_active:
+ self._connection_rollback_impl()
+
+ # handle case where a savepoint was created inside of a marker
+ # transaction that refers to a root. nested has to be cancelled
+ # also.
+ if self.connection._nested_transaction:
+ self.connection._nested_transaction._cancel()
+
+ self._deactivate_from_connection()
+
+ def _connection_begin_impl(self):
+ self.connection._begin_impl(self)
+
+ def _connection_rollback_impl(self):
+ self.connection._rollback_impl()
+
+ def _connection_commit_impl(self):
+ self.connection._commit_impl()
+
+ def _close_impl(self, try_deactivate=False):
+ try:
+ if self.is_active:
+ self._connection_rollback_impl()
+
+ if self.connection._nested_transaction:
+ self.connection._nested_transaction._cancel()
+ finally:
+ if self.is_active or try_deactivate:
+ self._deactivate_from_connection()
+ if self.connection._transaction is self:
+ self.connection._transaction = None
+
+ assert not self.is_active
+ assert self.connection._transaction is not self
+
+ def _do_close(self):
+ self._close_impl()
+
+ def _do_rollback(self):
+ self._close_impl(try_deactivate=True)
+
+ def _do_commit(self):
+ if self.is_active:
+ assert self.connection._transaction is self
+
+ try:
+ self._connection_commit_impl()
+ finally:
+ # whether or not commit succeeds, cancel any
+ # nested transactions, make this transaction "inactive"
+ # and remove it as a reset agent
+ if self.connection._nested_transaction:
+ self.connection._nested_transaction._cancel()
+
+ self._deactivate_from_connection()
+
+ # ...however only remove as the connection's current transaction
+ # if commit succeeded. otherwise it stays on so that a rollback
+ # needs to occur.
+ self.connection._transaction = None
+ else:
+ if self.connection._transaction is self:
+ self.connection._invalid_transaction()
+ else:
+ raise exc.InvalidRequestError("This transaction is inactive")
+
+ assert not self.is_active
+ assert self.connection._transaction is not self
+
+
+class NestedTransaction(Transaction):
+ """Represent a 'nested', or SAVEPOINT transaction.
+
+ The :class:`.NestedTransaction` object is created by calling the
+ :meth:`_engine.Connection.begin_nested` method of
+ :class:`_engine.Connection`.
+
+ When using :class:`.NestedTransaction`, the semantics of "begin" /
+ "commit" / "rollback" are as follows:
+
+ * the "begin" operation corresponds to the "BEGIN SAVEPOINT" command, where
+ the savepoint is given an explicit name that is part of the state
+ of this object.
+
+ * The :meth:`.NestedTransaction.commit` method corresponds to a
+ "RELEASE SAVEPOINT" operation, using the savepoint identifier associated
+ with this :class:`.NestedTransaction`.
+
+ * The :meth:`.NestedTransaction.rollback` method corresponds to a
+ "ROLLBACK TO SAVEPOINT" operation, using the savepoint identifier
+ associated with this :class:`.NestedTransaction`.
+
+ The rationale for mimicking the semantics of an outer transaction in
+ terms of savepoints is so that code may deal with a "savepoint"
+ transaction and an "outer" transaction in an agnostic way.
+
+ .. seealso::
+
+ :ref:`session_begin_nested` - ORM version of the SAVEPOINT API.
+
+ """
+
+ __slots__ = ("connection", "is_active", "_savepoint", "_previous_nested")
+
+ def __init__(self, connection):
+ assert connection._transaction is not None
+ if connection._trans_context_manager:
+ TransactionalContext._trans_ctx_check(connection)
+ self.connection = connection
+ self._savepoint = self.connection._savepoint_impl()
+ self.is_active = True
+ self._previous_nested = connection._nested_transaction
+ connection._nested_transaction = self
+
+ def _deactivate_from_connection(self, warn=True):
+ if self.connection._nested_transaction is self:
+ self.connection._nested_transaction = self._previous_nested
+ elif warn:
+ util.warn(
+ "nested transaction already deassociated from connection"
+ )
+
+ @property
+ def _deactivated_from_connection(self):
+ return self.connection._nested_transaction is not self
+
+ def _cancel(self):
+ # called by RootTransaction when the outer transaction is
+ # committed, rolled back, or closed to cancel all savepoints
+ # without any action being taken
+ self.is_active = False
+ self._deactivate_from_connection()
+ if self._previous_nested:
+ self._previous_nested._cancel()
+
+ def _close_impl(self, deactivate_from_connection, warn_already_deactive):
+ try:
+ if self.is_active and self.connection._transaction.is_active:
+ self.connection._rollback_to_savepoint_impl(self._savepoint)
+ finally:
+ self.is_active = False
+
+ if deactivate_from_connection:
+ self._deactivate_from_connection(warn=warn_already_deactive)
+
+ assert not self.is_active
+ if deactivate_from_connection:
+ assert self.connection._nested_transaction is not self
+
+ def _do_deactivate(self):
+ self._close_impl(False, False)
+
+ def _do_close(self):
+ self._close_impl(True, False)
+
+ def _do_rollback(self):
+ self._close_impl(True, True)
+
+ def _do_commit(self):
+ if self.is_active:
+ try:
+ self.connection._release_savepoint_impl(self._savepoint)
+ finally:
+ # nested trans becomes inactive on failed release
+ # unconditionally. this prevents it from trying to
+ # emit SQL when it rolls back.
+ self.is_active = False
+
+ # but only de-associate from connection if it succeeded
+ self._deactivate_from_connection()
+ else:
+ if self.connection._nested_transaction is self:
+ self.connection._invalid_transaction()
+ else:
+ raise exc.InvalidRequestError(
+ "This nested transaction is inactive"
+ )
+
+
+class TwoPhaseTransaction(RootTransaction):
+ """Represent a two-phase transaction.
+
+ A new :class:`.TwoPhaseTransaction` object may be procured
+ using the :meth:`_engine.Connection.begin_twophase` method.
+
+ The interface is the same as that of :class:`.Transaction`
+ with the addition of the :meth:`prepare` method.
+
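+ E.g., a sketch of the two-phase flow, assuming a backend and DBAPI
+ that support two-phase transactions::
+
+     with engine.connect() as connection:
+         trans = connection.begin_twophase()
+         connection.execute(text("insert into t (x) values (1)"))
+         trans.prepare()
+         trans.commit()
+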
+ """
+
+ __slots__ = ("connection", "is_active", "xid", "_is_prepared")
+
+ def __init__(self, connection, xid):
+ self._is_prepared = False
+ self.xid = xid
+ super(TwoPhaseTransaction, self).__init__(connection)
+
+ def prepare(self):
+ """Prepare this :class:`.TwoPhaseTransaction`.
+
+ After a PREPARE, the transaction can be committed.
+
+ """
+ if not self.is_active:
+ raise exc.InvalidRequestError("This transaction is inactive")
+ self.connection._prepare_twophase_impl(self.xid)
+ self._is_prepared = True
+
+ def _connection_begin_impl(self):
+ self.connection._begin_twophase_impl(self)
+
+ def _connection_rollback_impl(self):
+ self.connection._rollback_twophase_impl(self.xid, self._is_prepared)
+
+ def _connection_commit_impl(self):
+ self.connection._commit_twophase_impl(self.xid, self._is_prepared)
+
+
+class Engine(Connectable, log.Identified):
+ """
+ Connects a :class:`~sqlalchemy.pool.Pool` and
+ :class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a
+ source of database connectivity and behavior.
+
+ This is the **SQLAlchemy 1.x version** of :class:`_engine.Engine`. For
+ the :term:`2.0 style` version, which includes some API differences,
+ see :class:`_future.Engine`.
+
+ An :class:`_engine.Engine` object is instantiated publicly using the
+ :func:`~sqlalchemy.create_engine` function.
+
+ .. seealso::
+
+ :doc:`/core/engines`
+
+ :ref:`connections_toplevel`
+
+ """
+
+ _execution_options = _EMPTY_EXECUTION_OPTS
+ _has_events = False
+ _connection_cls = Connection
+ _sqla_logger_namespace = "sqlalchemy.engine.Engine"
+ _is_future = False
+
+ _schema_translate_map = None
+
+ def __init__(
+ self,
+ pool,
+ dialect,
+ url,
+ logging_name=None,
+ echo=None,
+ query_cache_size=500,
+ execution_options=None,
+ hide_parameters=False,
+ ):
+ self.pool = pool
+ self.url = url
+ self.dialect = dialect
+ if logging_name:
+ self.logging_name = logging_name
+ self.echo = echo
+ self.hide_parameters = hide_parameters
+ if query_cache_size != 0:
+ self._compiled_cache = util.LRUCache(
+ query_cache_size, size_alert=self._lru_size_alert
+ )
+ else:
+ self._compiled_cache = None
+ log.instance_logger(self, echoflag=echo)
+ if execution_options:
+ self.update_execution_options(**execution_options)
+
+ def _lru_size_alert(self, cache):
+ if self._should_log_info:
+ self.logger.info(
+ "Compiled cache size pruning from %d items to %d. "
+ "Increase cache size to reduce the frequency of pruning.",
+ len(cache),
+ cache.capacity,
+ )
+
+ @property
+ def engine(self):
+ return self
+
+ def clear_compiled_cache(self):
+ """Clear the compiled cache associated with the dialect.
+
+ This applies **only** to the built-in cache that is established
+ via the :paramref:`_engine.create_engine.query_cache_size` parameter.
+ It will not impact any dictionary caches that were passed via the
+ :paramref:`.Connection.execution_options.query_cache` parameter.
+
+ .. versionadded:: 1.4
+
+ """
+ if self._compiled_cache:
+ self._compiled_cache.clear()
+
+ def update_execution_options(self, **opt):
+ r"""Update the default execution_options dictionary
+ of this :class:`_engine.Engine`.
+
+ The given keys/values in \**opt are added to the
+ default execution options that will be used for
+ all connections. The initial contents of this dictionary
+ can be sent via the ``execution_options`` parameter
+ to :func:`_sa.create_engine`.
+
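+ E.g., assuming a dialect that accepts the ``isolation_level``
+ execution option::
+
+     engine.update_execution_options(isolation_level="READ COMMITTED")
+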
+ .. seealso::
+
+ :meth:`_engine.Connection.execution_options`
+
+ :meth:`_engine.Engine.execution_options`
+
+ """
+ self._execution_options = self._execution_options.union(opt)
+ self.dispatch.set_engine_execution_options(self, opt)
+ self.dialect.set_engine_execution_options(self, opt)
+
+ def execution_options(self, **opt):
+ """Return a new :class:`_engine.Engine` that will provide
+ :class:`_engine.Connection` objects with the given execution options.
+
+ The returned :class:`_engine.Engine` remains related to the original
+ :class:`_engine.Engine` in that it shares the same connection pool and
+ other state:
+
+ * The :class:`_pool.Pool` used by the new :class:`_engine.Engine`
+ is the
+ same instance. The :meth:`_engine.Engine.dispose`
+ method will replace
+ the connection pool instance for the parent engine as well
+ as this one.
+ * Event listeners are "cascaded" - meaning, the new
+ :class:`_engine.Engine`
+ inherits the events of the parent, and new events can be associated
+ with the new :class:`_engine.Engine` individually.
+ * The logging configuration and logging_name is copied from the parent
+ :class:`_engine.Engine`.
+
+ The intent of the :meth:`_engine.Engine.execution_options` method is
+ to implement "sharding" schemes where multiple :class:`_engine.Engine`
+ objects refer to the same connection pool, but are differentiated
+ by options that would be consumed by a custom event::
+
+ primary_engine = create_engine("mysql://")
+ shard1 = primary_engine.execution_options(shard_id="shard1")
+ shard2 = primary_engine.execution_options(shard_id="shard2")
+
+ Above, the ``shard1`` engine serves as a factory for
+ :class:`_engine.Connection`
+ objects that will contain the execution option
+ ``shard_id=shard1``, and ``shard2`` will produce
+ :class:`_engine.Connection`
+ objects that contain the execution option ``shard_id=shard2``.
+
+ An event handler can consume the above execution option to perform
+ a schema switch or other operation, given a connection. Below
+ we emit a MySQL ``use`` statement to switch databases, at the same
+ time keeping track of which database we've established using the
+ :attr:`_engine.Connection.info` dictionary,
+ which gives us a persistent
+ storage space that follows the DBAPI connection::
+
+ from sqlalchemy import event
+ from sqlalchemy.engine import Engine
+
+ shards = {"default": "base", shard_1: "db1", "shard_2": "db2"}
+
+ @event.listens_for(Engine, "before_cursor_execute")
+ def _switch_shard(conn, cursor, stmt,
+ params, context, executemany):
+ shard_id = conn._execution_options.get('shard_id', "default")
+ current_shard = conn.info.get("current_shard", None)
+
+ if current_shard != shard_id:
+ cursor.execute("use %s" % shards[shard_id])
+ conn.info["current_shard"] = shard_id
+
+ .. seealso::
+
+ :meth:`_engine.Connection.execution_options`
+ - update execution options
+ on a :class:`_engine.Connection` object.
+
+ :meth:`_engine.Engine.update_execution_options`
+ - update the execution
+ options for a given :class:`_engine.Engine` in place.
+
+ :meth:`_engine.Engine.get_execution_options`
+
+
+ """
+ return self._option_cls(self, opt)
+
+ def get_execution_options(self):
+ """Get the non-SQL options which will take effect during execution.
+
+ .. versionadded:: 1.3
+
+ .. seealso::
+
+ :meth:`_engine.Engine.execution_options`
+ """
+ return self._execution_options
+
+ @property
+ def name(self):
+ """String name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+ in use by this :class:`Engine`."""
+
+ return self.dialect.name
+
+ @property
+ def driver(self):
+ """Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect`
+ in use by this :class:`Engine`."""
+
+ return self.dialect.driver
+
+ echo = log.echo_property()
+
+ def __repr__(self):
+ return "Engine(%r)" % (self.url,)
+
+ def dispose(self, close=True):
+ """Dispose of the connection pool used by this
+ :class:`_engine.Engine`.
+
+ A new connection pool is created immediately after the old one has been
+ disposed. The previous connection pool is disposed either actively, by
+ closing out all currently checked-in connections in that pool, or
+ passively, by losing references to it but otherwise not closing any
+ connections. The latter strategy is more appropriate for an initializer
+ in a forked Python process.
+
+ :param close: if left at its default of ``True``, has the
+ effect of fully closing all **currently checked in**
+ database connections. Connections that are still checked out
+ will **not** be closed, however they will no longer be associated
+ with this :class:`_engine.Engine`,
+ so when they are closed individually, eventually the
+ :class:`_pool.Pool` which they are associated with will
+ be garbage collected and they will be closed out fully, if
+ not already closed on checkin.
+
+ If set to ``False``, the previous connection pool is de-referenced,
+ and otherwise not touched in any way.
+
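+ E.g., a sketch of the child-process pattern; after a fork, the
+ child discards the inherited pool without closing connections that
+ the parent process still owns::
+
+     import os
+
+     if os.fork() == 0:
+         # in the child process, replace the pool without
+         # closing the parent's checked-in connections
+         engine.dispose(close=False)
+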
+ .. versionadded:: 1.4.33 Added the :paramref:`.Engine.dispose.close`
+ parameter to allow the replacement of a connection pool in a child
+ process without interfering with the connections used by the parent
+ process.
+
+
+ .. seealso::
+
+ :ref:`engine_disposal`
+
+ :ref:`pooling_multiprocessing`
+
+ """
+ if close:
+ self.pool.dispose()
+ self.pool = self.pool.recreate()
+ self.dispatch.engine_disposed(self)
+
+ def _execute_default(
+ self, default, multiparams=(), params=util.EMPTY_DICT
+ ):
+ with self.connect() as conn:
+ return conn._execute_default(default, multiparams, params)
+
+ @contextlib.contextmanager
+ def _optional_conn_ctx_manager(self, connection=None):
+ if connection is None:
+ with self.connect() as conn:
+ yield conn
+ else:
+ yield connection
+
+ class _trans_ctx(object):
+ def __init__(self, conn, transaction, close_with_result):
+ self.conn = conn
+ self.transaction = transaction
+ self.close_with_result = close_with_result
+
+ def __enter__(self):
+ self.transaction.__enter__()
+ return self.conn
+
+ def __exit__(self, type_, value, traceback):
+ try:
+ self.transaction.__exit__(type_, value, traceback)
+ finally:
+ if not self.close_with_result:
+ self.conn.close()
+
+ def begin(self, close_with_result=False):
+ """Return a context manager delivering a :class:`_engine.Connection`
+ with a :class:`.Transaction` established.
+
+ E.g.::
+
+ with engine.begin() as conn:
+ conn.execute(
+ text("insert into table (x, y, z) values (1, 2, 3)")
+ )
+ conn.execute(text("my_special_procedure(5)"))
+
+ Upon successful operation, the :class:`.Transaction`
+ is committed. If an error is raised, the :class:`.Transaction`
+ is rolled back.
+
+ Legacy use only: the ``close_with_result`` flag is normally ``False``,
+ and indicates that the :class:`_engine.Connection` will be closed when
+ the operation is complete. When set to ``True``, it indicates the
+ :class:`_engine.Connection` is in "single use" mode, where the
+ :class:`_engine.CursorResult` returned by the first call to
+ :meth:`_engine.Connection.execute` will close the
+ :class:`_engine.Connection` when that :class:`_engine.CursorResult` has
+ exhausted all result rows.
+
+ .. seealso::
+
+ :meth:`_engine.Engine.connect` - procure a
+ :class:`_engine.Connection` from
+ an :class:`_engine.Engine`.
+
+ :meth:`_engine.Connection.begin` - start a :class:`.Transaction`
+ for a particular :class:`_engine.Connection`.
+
+ """
+ if self._connection_cls._is_future:
+ conn = self.connect()
+ else:
+ conn = self.connect(close_with_result=close_with_result)
+ try:
+ trans = conn.begin()
+ except:
+ with util.safe_reraise():
+ conn.close()
+ return Engine._trans_ctx(conn, trans, close_with_result)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_engine.Engine.transaction` "
+ "method is deprecated and will be "
+ "removed in a future release. Use the :meth:`_engine.Engine.begin` "
+ "context "
+ "manager instead.",
+ )
+ def transaction(self, callable_, *args, **kwargs):
+ r"""Execute the given function within a transaction boundary.
+
+ The function is passed a :class:`_engine.Connection` newly procured
+ from :meth:`_engine.Engine.connect` as the first argument,
+ followed by the given \*args and \**kwargs.
+
+ e.g.::
+
+ def do_something(conn, x, y):
+ conn.execute(text("some statement"), {'x':x, 'y':y})
+
+ engine.transaction(do_something, 5, 10)
+
+ The operations inside the function are all invoked within the
+ context of a single :class:`.Transaction`.
+ Upon success, the transaction is committed. If an
+ exception is raised, the transaction is rolled back
+ before propagating the exception.
+
+ .. note::
+
+ The :meth:`.transaction` method is superseded by
+ the usage of the Python ``with:`` statement, which can
+ be used with :meth:`_engine.Engine.begin`::
+
+ with engine.begin() as conn:
+ conn.execute(text("some statement"), {'x':5, 'y':10})
+
+ .. seealso::
+
+ :meth:`_engine.Engine.begin` - engine-level transactional
+ context
+
+ :meth:`_engine.Connection.transaction`
+ - connection-level version of
+ :meth:`_engine.Engine.transaction`
+
+ """
+ kwargs["_sa_skip_warning"] = True
+ with self.connect() as conn:
+ return conn.transaction(callable_, *args, **kwargs)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_engine.Engine.run_callable` "
+ "method is deprecated and will be "
+ "removed in a future release. Use the :meth:`_engine.Engine.begin` "
+ "context manager instead.",
+ )
+ def run_callable(self, callable_, *args, **kwargs):
+ r"""Given a callable object or function, execute it, passing
+ a :class:`_engine.Connection` as the first argument.
+
+ The given \*args and \**kwargs are passed subsequent
+ to the :class:`_engine.Connection` argument.
+
+ This function, along with :meth:`_engine.Connection.run_callable`,
+ allows a function to be run with a :class:`_engine.Connection`
+ or :class:`_engine.Engine` object without the need to know
+ which one is being dealt with.
+
+ """
+ kwargs["_sa_skip_warning"] = True
+ with self.connect() as conn:
+ return conn.run_callable(callable_, *args, **kwargs)
+
+ def _run_ddl_visitor(self, visitorcallable, element, **kwargs):
+ with self.begin() as conn:
+ conn._run_ddl_visitor(visitorcallable, element, **kwargs)
+
+ @util.deprecated_20(
+ ":meth:`_engine.Engine.execute`",
+ alternative="All statement execution in SQLAlchemy 2.0 is performed "
+ "by the :meth:`_engine.Connection.execute` method of "
+ ":class:`_engine.Connection`, "
+ "or in the ORM by the :meth:`.Session.execute` method of "
+ ":class:`.Session`.",
+ )
+ def execute(self, statement, *multiparams, **params):
+ """Executes the given construct and returns a
+ :class:`_engine.CursorResult`.
+
+ The arguments are the same as those used by
+ :meth:`_engine.Connection.execute`.
+
+ Here, a :class:`_engine.Connection` is acquired using the
+ :meth:`_engine.Engine.connect` method, and the statement executed
+ with that connection. The returned :class:`_engine.CursorResult`
+ is flagged
+ such that when the :class:`_engine.CursorResult` is exhausted and its
+ underlying cursor is closed, the :class:`_engine.Connection`
+ created here
+ will also be closed, which allows its associated DBAPI connection
+ resource to be returned to the connection pool.
+
+ """
+ connection = self.connect(close_with_result=True)
+ return connection.execute(statement, *multiparams, **params)
+
+ @util.deprecated_20(
+ ":meth:`_engine.Engine.scalar`",
+ alternative="All statement execution in SQLAlchemy 2.0 is performed "
+ "by the :meth:`_engine.Connection.execute` method of "
+ ":class:`_engine.Connection`, "
+ "or in the ORM by the :meth:`.Session.execute` method of "
+ ":class:`.Session`; the :meth:`_future.Result.scalar` "
+ "method can then be "
+ "used to return a scalar result.",
+ )
+ def scalar(self, statement, *multiparams, **params):
+ """Executes and returns the first column of the first row.
+
+ The underlying result/cursor is closed after execution.
+ """
+ return self.execute(statement, *multiparams, **params).scalar()
+
+ def _execute_clauseelement(
+ self,
+ elem,
+ multiparams=None,
+ params=None,
+ execution_options=_EMPTY_EXECUTION_OPTS,
+ ):
+ connection = self.connect(close_with_result=True)
+ return connection._execute_clauseelement(
+ elem, multiparams, params, execution_options
+ )
+
+ def _execute_compiled(
+ self,
+ compiled,
+ multiparams,
+ params,
+ execution_options=_EMPTY_EXECUTION_OPTS,
+ ):
+ connection = self.connect(close_with_result=True)
+ return connection._execute_compiled(
+ compiled, multiparams, params, execution_options
+ )
+
+ def connect(self, close_with_result=False):
+ """Return a new :class:`_engine.Connection` object.
+
+ The :class:`_engine.Connection` object is a facade that uses a DBAPI
+ connection internally in order to communicate with the database. This
+ connection is procured from the connection-holding :class:`_pool.Pool`
+ referenced by this :class:`_engine.Engine`. When the
+ :meth:`_engine.Connection.close` method of the
+ :class:`_engine.Connection` object
+ is called, the underlying DBAPI connection is then returned to the
+ connection pool, where it may be used again in a subsequent call to
+ :meth:`_engine.Engine.connect`.
+
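+ E.g., using the :class:`_engine.Connection` as a context manager so
+ that it is released back to the pool when the block completes::
+
+     with engine.connect() as connection:
+         result = connection.execute(text("select * from some_table"))
+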
+ """
+
+ return self._connection_cls(self, close_with_result=close_with_result)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_engine.Engine.table_names` "
+ "method is deprecated and will be "
+ "removed in a future release. Please refer to "
+ ":meth:`_reflection.Inspector.get_table_names`.",
+ )
+ def table_names(self, schema=None, connection=None):
+ """Return a list of all table names available in the database.
+
+ :param schema: Optional, retrieve names from a non-default schema.
+
+ :param connection: Optional, use a specified connection.
+ """
+ with self._optional_conn_ctx_manager(connection) as conn:
+ insp = inspection.inspect(conn)
+ return insp.get_table_names(schema)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_engine.Engine.has_table` "
+ "method is deprecated and will be "
+ "removed in a future release. Please refer to "
+ ":meth:`_reflection.Inspector.has_table`.",
+ )
+ def has_table(self, table_name, schema=None):
+ """Return True if the given backend has a table of the given name.
+
+ .. seealso::
+
+ :ref:`metadata_reflection_inspector` - detailed schema inspection
+ using the :class:`_reflection.Inspector` interface.
+
+ :class:`.quoted_name` - used to pass quoting information along
+ with a schema identifier.
+
+ """
+ with self._optional_conn_ctx_manager(None) as conn:
+ insp = inspection.inspect(conn)
+ return insp.has_table(table_name, schema=schema)
+
+ def _wrap_pool_connect(self, fn, connection):
+ dialect = self.dialect
+ try:
+ return fn()
+ except dialect.dbapi.Error as e:
+ if connection is None:
+ Connection._handle_dbapi_exception_noconnection(
+ e, dialect, self
+ )
+ else:
+ util.raise_(
+ sys.exc_info()[1], with_traceback=sys.exc_info()[2]
+ )
+
+ def raw_connection(self, _connection=None):
+ """Return a "raw" DBAPI connection from the connection pool.
+
+ The returned object is a proxied version of the DBAPI
+ connection object used by the underlying driver in use.
+ The object will have all the same behavior as the real DBAPI
+ connection, except that its ``close()`` method will result in the
+ connection being returned to the pool, rather than being closed
+ for real.
+
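+ E.g., a usage sketch::
+
+     dbapi_conn = engine.raw_connection()
+     try:
+         cursor_obj = dbapi_conn.cursor()
+         cursor_obj.execute("select 1")
+         results = cursor_obj.fetchall()
+         cursor_obj.close()
+         dbapi_conn.commit()
+     finally:
+         dbapi_conn.close()
+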
+ This method provides direct DBAPI connection access for
+ special situations when the API provided by
+ :class:`_engine.Connection`
+ is not needed. When a :class:`_engine.Connection` object is already
+ present, the DBAPI connection is available using
+ the :attr:`_engine.Connection.connection` accessor.
+
+ .. seealso::
+
+ :ref:`dbapi_connections`
+
+ """
+ return self._wrap_pool_connect(self.pool.connect, _connection)
+
+
+class OptionEngineMixin(object):
+ _sa_propagate_class_events = False
+
+ def __init__(self, proxied, execution_options):
+ self._proxied = proxied
+ self.url = proxied.url
+ self.dialect = proxied.dialect
+ self.logging_name = proxied.logging_name
+ self.echo = proxied.echo
+ self._compiled_cache = proxied._compiled_cache
+ self.hide_parameters = proxied.hide_parameters
+ log.instance_logger(self, echoflag=self.echo)
+
+ # note: this will propagate events that are assigned to the parent
+ # engine after this OptionEngine is created. Since we share
+ # the events of the parent we also disallow class-level events
+ # to apply to the OptionEngine class directly.
+ #
+ # the other way this can work would be to transfer existing
+ # events only, using:
+ # self.dispatch._update(proxied.dispatch)
+ #
+ # that might be more appropriate however it would be a behavioral
+ # change for logic that assigns events to the parent engine and
+ # would like it to take effect for the already-created sub-engine.
+ self.dispatch = self.dispatch._join(proxied.dispatch)
+
+ self._execution_options = proxied._execution_options
+ self.update_execution_options(**execution_options)
+
+ def _get_pool(self):
+ return self._proxied.pool
+
+ def _set_pool(self, pool):
+ self._proxied.pool = pool
+
+ pool = property(_get_pool, _set_pool)
+
+ def _get_has_events(self):
+ return self._proxied._has_events or self.__dict__.get(
+ "_has_events", False
+ )
+
+ def _set_has_events(self, value):
+ self.__dict__["_has_events"] = value
+
+ _has_events = property(_get_has_events, _set_has_events)
+
+
+class OptionEngine(OptionEngineMixin, Engine):
+ pass
+
+
+Engine._option_cls = OptionEngine
diff --git a/lib/sqlalchemy/engine/characteristics.py b/lib/sqlalchemy/engine/characteristics.py
new file mode 100644
index 0000000..c00bff4
--- /dev/null
+++ b/lib/sqlalchemy/engine/characteristics.py
@@ -0,0 +1,56 @@
+import abc
+
+from ..util import ABC
+
+
+class ConnectionCharacteristic(ABC):
+ """An abstract base for an object that can set, get and reset a
+ per-connection characteristic, typically one that gets reset when the
+ connection is returned to the connection pool.
+
+ Transaction isolation is the canonical example, and the
+ ``IsolationLevelCharacteristic`` implementation provides this for the
+ ``DefaultDialect``.
+
+ The ``ConnectionCharacteristic`` class should call upon the ``Dialect`` for
+ the implementation of each method. The object exists strictly to serve as
+ a dialect visitor that can be placed into the
+ ``DefaultDialect.connection_characteristics`` dictionary where it will take
+ effect for calls to :meth:`_engine.Connection.execution_options` and
+ related APIs.
+
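+ A minimal sketch of a custom characteristic, assuming a DBAPI whose
+ connection objects expose an ``autocommit`` attribute (as e.g.
+ psycopg2's do); the class name is illustrative only::
+
+     class AutocommitCharacteristic(ConnectionCharacteristic):
+         transactional = True
+
+         def reset_characteristic(self, dialect, dbapi_conn):
+             dbapi_conn.autocommit = False
+
+         def set_characteristic(self, dialect, dbapi_conn, value):
+             dbapi_conn.autocommit = value
+
+         def get_characteristic(self, dialect, dbapi_conn):
+             return dbapi_conn.autocommit
+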
+ .. versionadded:: 1.4
+
+ """
+
+ __slots__ = ()
+
+ transactional = False
+
+ @abc.abstractmethod
+ def reset_characteristic(self, dialect, dbapi_conn):
+ """Reset the characteristic on the connection to its default value."""
+
+ @abc.abstractmethod
+ def set_characteristic(self, dialect, dbapi_conn, value):
+ """set characteristic on the connection to a given value."""
+
+ @abc.abstractmethod
+ def get_characteristic(self, dialect, dbapi_conn):
+ """Given a DBAPI connection, get the current value of the
+ characteristic.
+
+ """
+
+
+class IsolationLevelCharacteristic(ConnectionCharacteristic):
+ transactional = True
+
+ def reset_characteristic(self, dialect, dbapi_conn):
+ dialect.reset_isolation_level(dbapi_conn)
+
+ def set_characteristic(self, dialect, dbapi_conn, value):
+ dialect.set_isolation_level(dbapi_conn, value)
+
+ def get_characteristic(self, dialect, dbapi_conn):
+ return dialect.get_isolation_level(dbapi_conn)
diff --git a/lib/sqlalchemy/engine/create.py b/lib/sqlalchemy/engine/create.py
new file mode 100644
index 0000000..b9886b7
--- /dev/null
+++ b/lib/sqlalchemy/engine/create.py
@@ -0,0 +1,743 @@
+# engine/create.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+
+from . import base
+from . import url as _url
+from .mock import create_mock_engine
+from .. import event
+from .. import exc
+from .. import pool as poollib
+from .. import util
+from ..sql import compiler
+
+
+@util.deprecated_params(
+ strategy=(
+ "1.4",
+ "The :paramref:`_sa.create_engine.strategy` keyword is deprecated, "
+ "and the only argument accepted is 'mock'; please use "
+ ":func:`.create_mock_engine` going forward. For general "
+ "customization of create_engine which may have been accomplished "
+ "using strategies, see :class:`.CreateEnginePlugin`.",
+ ),
+ empty_in_strategy=(
+ "1.4",
+ "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
+ "deprecated, and no longer has any effect. All IN expressions "
+ "are now rendered using "
+ 'the "expanding parameter" strategy which renders a set of bound'
+ 'expressions, or an "empty set" SELECT, at statement execution'
+ "time.",
+ ),
+ case_sensitive=(
+ "1.4",
+ "The :paramref:`_sa.create_engine.case_sensitive` parameter "
+ "is deprecated and will be removed in a future release. "
+ "Applications should work with result column names in a case "
+ "sensitive fashion.",
+ ),
+)
+def create_engine(url, **kwargs):
+ """Create a new :class:`_engine.Engine` instance.
+
+ The standard calling form is to send the :ref:`URL <database_urls>` as the
+ first positional argument, usually a string
+ that indicates database dialect and connection arguments::
+
+ engine = create_engine("postgresql://scott:tiger@localhost/test")
+
+ .. note::
+
+ Please review :ref:`database_urls` for general guidelines in composing
+ URL strings. In particular, special characters, such as those often
+ part of passwords, must be URL encoded to be properly parsed.
+
+ Additional keyword arguments may then follow it which
+ establish various options on the resulting :class:`_engine.Engine`
+ and its underlying :class:`.Dialect` and :class:`_pool.Pool`
+ constructs::
+
+ engine = create_engine("mysql://scott:tiger@hostname/dbname",
+ encoding='latin1', echo=True)
+
+ The string form of the URL is
+ ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
+ ``dialect`` is a database name such as ``mysql``, ``oracle``,
+ ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
+ ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
+ the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
+
+ ``**kwargs`` takes a wide variety of options which are routed
+ towards their appropriate components. Arguments may be specific to
+ the :class:`_engine.Engine`, the underlying :class:`.Dialect`,
+ as well as the
+ :class:`_pool.Pool`. Specific dialects also accept keyword arguments that
+ are unique to that dialect. Here, we describe the parameters
+ that are common to most :func:`_sa.create_engine()` usage.
+
+ Once established, the newly resulting :class:`_engine.Engine` will
+ request a connection from the underlying :class:`_pool.Pool` once
+ :meth:`_engine.Engine.connect` is called, or a method which depends on it
+ such as :meth:`_engine.Engine.execute` is invoked. The
+ :class:`_pool.Pool` in turn
+ will establish the first actual DBAPI connection when this request
+ is received. The :func:`_sa.create_engine` call itself does **not**
+ establish any actual DBAPI connections directly.
+
+ .. seealso::
+
+ :doc:`/core/engines`
+
+ :doc:`/dialects/index`
+
+ :ref:`connections_toplevel`
+
+ :param case_sensitive: if False, result column names
+ will match in a case-insensitive fashion, that is,
+ ``row['SomeColumn']``.
+
+ :param connect_args: a dictionary of options which will be
+ passed directly to the DBAPI's ``connect()`` method as
+ additional keyword arguments. See the example
+ at :ref:`custom_dbapi_args`.
+
+ :param convert_unicode=False: if set to True, causes
+ all :class:`.String` datatypes to act as though the
+ :paramref:`.String.convert_unicode` flag has been set to ``True``,
+ regardless of a setting of ``False`` on an individual :class:`.String`
+ type. This has the effect of causing all :class:`.String` -based
+ columns to accommodate Python Unicode objects directly as though the
+ datatype were the :class:`.Unicode` type.
+
+ .. deprecated:: 1.3
+
+ The :paramref:`_sa.create_engine.convert_unicode` parameter
+ is deprecated and will be removed in a future release.
+ All modern DBAPIs now support Python Unicode directly and this
+ parameter is unnecessary.
+
+ :param creator: a callable which returns a DBAPI connection.
+ This creation function will be passed to the underlying
+ connection pool and will be used to create all new database
+ connections. Usage of this function causes connection
+ parameters specified in the URL argument to be bypassed.
+
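+ E.g., a sketch using the standard library ``sqlite3`` module (the
+ database path is illustrative)::
+
+     import sqlite3
+
+     def get_db_connection():
+         return sqlite3.connect("/path/to/database.db")
+
+     engine = create_engine("sqlite://", creator=get_db_connection)
+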
+ This hook is not as flexible as the newer
+ :meth:`_events.DialectEvents.do_connect` hook which allows complete
+ control over how a connection is made to the database, given the full
+ set of URL arguments and state beforehand.
+
+ .. seealso::
+
+ :meth:`_events.DialectEvents.do_connect` - event hook that allows
+ full control over DBAPI connection mechanics.
+
+ :ref:`custom_dbapi_args`
+
+ :param echo=False: if True, the Engine will log all statements
+ as well as a ``repr()`` of their parameter lists to the default log
+ handler, which defaults to ``sys.stdout`` for output. If set to the
+ string ``"debug"``, result rows will be printed to the standard output
+ as well. The ``echo`` attribute of ``Engine`` can be modified at any
+ time to turn logging on and off; direct control of logging is also
+ available using the standard Python ``logging`` module.
+
+ .. seealso::
+
+ :ref:`dbengine_logging` - further detail on how to configure
+ logging.
+
+
+ :param echo_pool=False: if True, the connection pool will log
+ informational output such as when connections are invalidated
+ as well as when connections are recycled to the default log handler,
+ which defaults to ``sys.stdout`` for output. If set to the string
+ ``"debug"``, the logging will include pool checkouts and checkins.
+ Direct control of logging is also available using the standard Python
+ ``logging`` module.
+
+ .. seealso::
+
+ :ref:`dbengine_logging` - further detail on how to configure
+ logging.
+
+
+ :param empty_in_strategy: No longer used; SQLAlchemy now uses
+ "empty set" behavior for IN in all cases.
+
+ :param enable_from_linting: defaults to True. Will emit a warning
+ if a given SELECT statement is found to have un-linked FROM elements
+ which would cause a cartesian product.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :ref:`change_4737`
+
+ :param encoding: **legacy Python 2 value only, where it only applies to
+ specific DBAPIs, not used in Python 3 for any modern DBAPI driver.
+ Please refer to individual dialect documentation for client encoding
+ behaviors.** Defaults to the string value ``utf-8``. This value
+ refers **only** to the character encoding that is used when SQLAlchemy
+ sends or receives data from a :term:`DBAPI` that does not support
+ Python Unicode and **is only used under Python 2**, only for certain
+ DBAPI drivers, and only in certain circumstances. **Python 3 users
+ please DISREGARD this parameter and refer to the documentation for the
+ specific dialect in use in order to configure character encoding
+ behavior.**
+
+ .. note:: The ``encoding`` parameter deals only with in-Python
+ encoding issues that were prevalent with **some DBAPIS only**
+ under **Python 2 only**. Under Python 3 it is not used by
+ any modern dialect. For DBAPIs that require
+ client encoding configurations, which are most of those outside
+ of SQLite, please consult specific :ref:`dialect documentation
+ <dialect_toplevel>` for details.
+
+ All modern DBAPIs that work in Python 3 necessarily feature direct
+ support for Python unicode strings. Under Python 2, this was not
+ always the case. For those scenarios where the DBAPI is detected as
+ not supporting a Python ``unicode`` object under Python 2, this
+ encoding is used to determine the source/destination encoding. It is
+ **not used** for those cases where the DBAPI handles unicode directly.
+
+ To properly configure a system to accommodate Python ``unicode``
+ objects, the DBAPI should be configured to handle unicode to the
+ greatest degree as is appropriate - see the notes on unicode pertaining
+ to the specific target database in use at :ref:`dialect_toplevel`.
+
+ Areas where string encoding may need to be accommodated
+ outside of the DBAPI, nearly always under **Python 2 only**,
+ include zero or more of:
+
+ * the values passed to bound parameters, corresponding to
+ the :class:`.Unicode` type or the :class:`.String` type
+ when ``convert_unicode`` is ``True``;
+ * the values returned in result set columns corresponding
+ to the :class:`.Unicode` type or the :class:`.String`
+ type when ``convert_unicode`` is ``True``;
+ * the string SQL statement passed to the DBAPI's
+ ``cursor.execute()`` method;
+ * the string names of the keys in the bound parameter
+ dictionary passed to the DBAPI's ``cursor.execute()``
+ as well as ``cursor.setinputsizes()`` methods;
+ * the string column names retrieved from the DBAPI's
+ ``cursor.description`` attribute.
+
+ When using Python 3, the DBAPI is required to support all of the above
+ values as Python ``unicode`` objects, which in Python 3 are just known
+ as ``str``. In Python 2, the DBAPI does not specify unicode behavior
+ at all, so SQLAlchemy must make decisions for each of the above values
+ on a per-DBAPI basis - implementations are completely inconsistent in
+ their behavior.
+
+ :param execution_options: Dictionary execution options which will
+ be applied to all connections. See
+ :meth:`~sqlalchemy.engine.Connection.execution_options`
+
+ :param future: Use the 2.0 style :class:`_future.Engine` and
+ :class:`_future.Connection` API.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :ref:`migration_20_toplevel`
+
+ :param hide_parameters: Boolean, when set to True, SQL statement parameters
+ will not be displayed in INFO logging nor will they be formatted into
+ the string representation of :class:`.StatementError` objects.
+
+ .. versionadded:: 1.3.8
+
+ .. seealso::
+
+ :ref:`dbengine_logging` - further detail on how to configure
+ logging.
+
+ :param implicit_returning=True: Legacy flag that when set to ``False``
+ will disable the use of ``RETURNING`` on supporting backends where it
+ would normally be used to fetch newly generated primary key values for
+ single-row INSERT statements that do not otherwise specify a RETURNING
+ clause. This behavior applies primarily to the PostgreSQL, Oracle,
+ SQL Server backends.
+
+ .. warning:: this flag originally allowed the "implicit returning"
+ feature to be *enabled* back when it was very new and there was not
+ well-established database support. In modern SQLAlchemy, this flag
+ should **always be set to True**. Some SQLAlchemy features will
+ fail to function properly if this flag is set to ``False``.
+
+ :param isolation_level: this string parameter is interpreted by various
+ dialects in order to affect the transaction isolation level of the
+ database connection. The parameter essentially accepts some subset of
+ these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE READ"``,
+ ``"READ COMMITTED"``, ``"READ UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
+ Behavior here varies per backend, and
+ individual dialects should be consulted directly.
+
+ Note that the isolation level can also be set on a
+ per-:class:`_engine.Connection` basis as well, using the
+ :paramref:`.Connection.execution_options.isolation_level`
+ feature.
+
+ .. seealso::
+
+ :ref:`dbapi_autocommit`
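+
+ E.g., a sketch using an illustrative PostgreSQL URL::
+
+ engine = create_engine(
+ "postgresql://scott:tiger@localhost/test",
+ isolation_level="REPEATABLE READ",
+ )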
+
+ :param json_deserializer: for dialects that support the
+ :class:`_types.JSON`
+ datatype, this is a Python callable that will convert a JSON string
+ to a Python object. By default, the Python ``json.loads`` function is
+ used.
+
+ .. versionchanged:: 1.3.7 The SQLite dialect renamed this from
+ ``_json_deserializer``.
+
+ :param json_serializer: for dialects that support the :class:`_types.JSON`
+ datatype, this is a Python callable that will render a given object
+ as JSON. By default, the Python ``json.dumps`` function is used.
+
+ .. versionchanged:: 1.3.7 The SQLite dialect renamed this from
+ ``_json_serializer``.
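+
+ For example, a sketch of a serializer that leaves non-ASCII
+ characters unescaped; the SQLite URL is illustrative and any
+ JSON-supporting dialect applies::
+
+ import json
+
+ engine = create_engine(
+ "sqlite://",
+ json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False),
+ )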
+
+
+ :param label_length=None: optional integer value which limits
+ the size of dynamically generated column labels to that many
+ characters. If less than 6, labels are generated as
+ "_(counter)". If ``None``, the value of
+ ``dialect.max_identifier_length``, which may be affected via the
+ :paramref:`_sa.create_engine.max_identifier_length` parameter,
+ is used instead. The value of
+ :paramref:`_sa.create_engine.label_length`
+ may not be larger than that of
+ :paramref:`_sa.create_engine.max_identifier_length`.
+
+ .. seealso::
+
+ :paramref:`_sa.create_engine.max_identifier_length`
+
+ :param listeners: A list of one or more
+ :class:`~sqlalchemy.interfaces.PoolListener` objects which will
+ receive connection pool events.
+
+ :param logging_name: String identifier which will be used within
+ the "name" field of logging records generated within the
+ "sqlalchemy.engine" logger. Defaults to a hexstring of the
+ object's id.
+
+ .. seealso::
+
+ :ref:`dbengine_logging` - further detail on how to configure
+ logging.
+
+ :paramref:`_engine.Connection.execution_options.logging_token`
+
+
+
+ :param max_identifier_length: integer; override the max_identifier_length
+ determined by the dialect. If ``None`` or zero, it has no effect. This
+ is the database's configured maximum number of characters that may be
+ used in a SQL identifier such as a table name, column name, or label
+ name. All dialects determine this value automatically, however in the
+ case of a new database version for which this value has changed but
+ SQLAlchemy's dialect has not been adjusted, the value may be passed
+ here.
+
+ .. versionadded:: 1.3.9
+
+ .. seealso::
+
+ :paramref:`_sa.create_engine.label_length`
+
+ :param max_overflow=10: the number of connections to allow in
+ connection pool "overflow", that is connections that can be
+ opened above and beyond the pool_size setting, which defaults
+ to five. This is only used with :class:`~sqlalchemy.pool.QueuePool`.
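+
+ For example, an illustrative sketch of a pool that holds ten
+ persistent connections and tolerates bursts of twenty more::
+
+ engine = create_engine(
+ "postgresql://scott:tiger@localhost/test",
+ pool_size=10,
+ max_overflow=20,
+ )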
+
+ :param module=None: reference to a Python module object (the module
+ itself, not its string name). Specifies an alternate DBAPI module to
+ be used by the engine's dialect. Each sub-dialect references a
+ specific DBAPI which will be imported before first connect. This
+ parameter causes the import to be bypassed, and the given module to
+ be used instead. Can be used for testing of DBAPIs as well as to
+ inject "mock" DBAPI implementations into the :class:`_engine.Engine`.
+
+ :param paramstyle=None: The `paramstyle <https://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
+ to use when rendering bound parameters. This style defaults to the
+ one recommended by the DBAPI itself, which is retrieved from the
+ ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
+ more than one paramstyle, and in particular it may be desirable
+ to change a "named" paramstyle into a "positional" one, or vice versa.
+ When this attribute is passed, it should be one of the values
+ ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
+ ``"pyformat"``, and should correspond to a parameter style known
+ to be supported by the DBAPI in use.
+
+ :param pool=None: an already-constructed instance of
+ :class:`~sqlalchemy.pool.Pool`, such as a
+ :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
+ pool will be used directly as the underlying connection pool
+ for the engine, bypassing whatever connection parameters are
+ present in the URL argument. For information on constructing
+ connection pools manually, see :ref:`pooling_toplevel`.
+
+ :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
+ subclass, which will be used to create a connection pool
+ instance using the connection parameters given in the URL. Note
+ this differs from ``pool`` in that you don't actually
+ instantiate the pool in this case, you just indicate what type
+ of pool to be used.
+
+ :param pool_logging_name: String identifier which will be used within
+ the "name" field of logging records generated within the
+ "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
+ id.
+
+
+ .. seealso::
+
+ :ref:`dbengine_logging` - further detail on how to configure
+ logging.
+
+
+ :param pool_pre_ping: boolean, if True will enable the connection pool
+ "pre-ping" feature that tests connections for liveness upon
+ each checkout.
+
+ .. versionadded:: 1.2
+
+ .. seealso::
+
+ :ref:`pool_disconnects_pessimistic`
+
+ :param pool_size=5: the number of connections to keep open
+ inside the connection pool. This is used with
+ :class:`~sqlalchemy.pool.QueuePool` as
+ well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
+ :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
+ of 0 indicates no limit; to disable pooling, set ``poolclass`` to
+ :class:`~sqlalchemy.pool.NullPool` instead.
+
+ :param pool_recycle=-1: this setting causes the pool to recycle
+ connections after the given number of seconds has passed. It
+ defaults to -1, or no timeout. For example, setting to 3600
+ means connections will be recycled after one hour. Note that
+ MySQL in particular will disconnect automatically if no
+ activity is detected on a connection for eight hours (although
+ this is configurable with the MySQLDB connection itself and the
+ server configuration as well).
+
+ .. seealso::
+
+ :ref:`pool_setting_recycle`
+
+ :param pool_reset_on_return='rollback': set the
+ :paramref:`_pool.Pool.reset_on_return` parameter of the underlying
+ :class:`_pool.Pool` object, which can be set to the values
+ ``"rollback"``, ``"commit"``, or ``None``.
+
+ .. seealso::
+
+ :paramref:`_pool.Pool.reset_on_return`
+
+ :param pool_timeout=30: number of seconds to wait before giving
+ up on getting a connection from the pool. This is only used
+ with :class:`~sqlalchemy.pool.QueuePool`. This can be a float but is
+ subject to the limitations of Python time functions which may not be
+ reliable in the tens of milliseconds.
+
+ .. note: don't use 30.0 above, it seems to break with the :param tag
+
+ :param pool_use_lifo=False: use LIFO (last-in-first-out) when retrieving
+ connections from :class:`.QueuePool` instead of FIFO
+ (first-in-first-out). Using LIFO, a server-side timeout scheme can
+ reduce the number of connections used during non-peak periods of
+ use. When planning for server-side timeouts, ensure that a recycle or
+ pre-ping strategy is in use to gracefully handle stale connections.
+
+ .. versionadded:: 1.3
+
+ .. seealso::
+
+ :ref:`pool_use_lifo`
+
+ :ref:`pool_disconnects`
+
+ :param plugins: string list of plugin names to load. See
+ :class:`.CreateEnginePlugin` for background.
+
+ .. versionadded:: 1.2.3
+
+ :param query_cache_size: size of the cache used to cache the SQL string
+ form of queries. Set to zero to disable caching.
+
+ The cache is pruned of its least recently used items when its size reaches
+ N * 1.5. Defaults to 500, meaning the cache will always store at least
+ 500 SQL statements when filled, and will grow up to 750 items at which
+ point it is pruned back down to 500 by removing the 250 least recently
+ used items.
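+
+ For example, a sketch raising the cache size; the URL shown is
+ illustrative::
+
+ engine = create_engine(
+ "postgresql://scott:tiger@localhost/test",
+ query_cache_size=1200,
+ )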
+
+ Caching is accomplished on a per-statement basis by generating a
+ cache key that represents the statement's structure, then generating
+ string SQL for the current dialect only if that key is not present
+ in the cache. All statements support caching, however some features
+ such as an INSERT with a large set of parameters will intentionally
+ bypass the cache. SQL logging will indicate statistics for each
+ statement whether or not it was pulled from the cache.
+
+ .. note:: some ORM functions related to unit-of-work persistence as well
+ as some attribute loading strategies will make use of individual
+ per-mapper caches outside of the main cache.
+
+
+ .. seealso::
+
+ :ref:`sql_caching`
+
+ .. versionadded:: 1.4
+
+ """ # noqa
+
+ if "strategy" in kwargs:
+ strat = kwargs.pop("strategy")
+ if strat == "mock":
+ return create_mock_engine(url, **kwargs)
+ else:
+ raise exc.ArgumentError("unknown strategy: %r" % strat)
+
+ kwargs.pop("empty_in_strategy", None)
+
+ # create url.URL object
+ u = _url.make_url(url)
+
+ u, plugins, kwargs = u._instantiate_plugins(kwargs)
+
+ entrypoint = u._get_entrypoint()
+ dialect_cls = entrypoint.get_dialect_cls(u)
+
+ if kwargs.pop("_coerce_config", False):
+
+ def pop_kwarg(key, default=None):
+ value = kwargs.pop(key, default)
+ if key in dialect_cls.engine_config_types:
+ value = dialect_cls.engine_config_types[key](value)
+ return value
+
+ else:
+ pop_kwarg = kwargs.pop
+
+ dialect_args = {}
+ # consume dialect arguments from kwargs
+ for k in util.get_cls_kwargs(dialect_cls):
+ if k in kwargs:
+ dialect_args[k] = pop_kwarg(k)
+
+ dbapi = kwargs.pop("module", None)
+ if dbapi is None:
+ dbapi_args = {}
+ for k in util.get_func_kwargs(dialect_cls.dbapi):
+ if k in kwargs:
+ dbapi_args[k] = pop_kwarg(k)
+ dbapi = dialect_cls.dbapi(**dbapi_args)
+
+ dialect_args["dbapi"] = dbapi
+
+ dialect_args.setdefault("compiler_linting", compiler.NO_LINTING)
+ enable_from_linting = kwargs.pop("enable_from_linting", True)
+ if enable_from_linting:
+ dialect_args["compiler_linting"] ^= compiler.COLLECT_CARTESIAN_PRODUCTS
+
+ for plugin in plugins:
+ plugin.handle_dialect_kwargs(dialect_cls, dialect_args)
+
+ # create dialect
+ dialect = dialect_cls(**dialect_args)
+
+ # assemble connection arguments
+ (cargs, cparams) = dialect.create_connect_args(u)
+ cparams.update(pop_kwarg("connect_args", {}))
+ cargs = list(cargs) # allow mutability
+
+ # look for existing pool or create
+ pool = pop_kwarg("pool", None)
+ if pool is None:
+
+ def connect(connection_record=None):
+ if dialect._has_events:
+ for fn in dialect.dispatch.do_connect:
+ connection = fn(dialect, connection_record, cargs, cparams)
+ if connection is not None:
+ return connection
+ return dialect.connect(*cargs, **cparams)
+
+ creator = pop_kwarg("creator", connect)
+
+ poolclass = pop_kwarg("poolclass", None)
+ if poolclass is None:
+ poolclass = dialect.get_dialect_pool_class(u)
+ pool_args = {"dialect": dialect}
+
+ # consume pool arguments from kwargs, translating a few of
+ # the arguments
+ translate = {
+ "logging_name": "pool_logging_name",
+ "echo": "echo_pool",
+ "timeout": "pool_timeout",
+ "recycle": "pool_recycle",
+ "events": "pool_events",
+ "reset_on_return": "pool_reset_on_return",
+ "pre_ping": "pool_pre_ping",
+ "use_lifo": "pool_use_lifo",
+ }
+ for k in util.get_cls_kwargs(poolclass):
+ tk = translate.get(k, k)
+ if tk in kwargs:
+ pool_args[k] = pop_kwarg(tk)
+
+ for plugin in plugins:
+ plugin.handle_pool_kwargs(poolclass, pool_args)
+
+ pool = poolclass(creator, **pool_args)
+ else:
+ if isinstance(pool, poollib.dbapi_proxy._DBProxy):
+ pool = pool.get_pool(*cargs, **cparams)
+
+ pool._dialect = dialect
+
+ # create engine.
+ if pop_kwarg("future", False):
+ from sqlalchemy import future
+
+ default_engine_class = future.Engine
+ else:
+ default_engine_class = base.Engine
+
+ engineclass = kwargs.pop("_future_engine_class", default_engine_class)
+
+ engine_args = {}
+ for k in util.get_cls_kwargs(engineclass):
+ if k in kwargs:
+ engine_args[k] = pop_kwarg(k)
+
+ # internal flags used by the test suite for instrumenting / proxying
+ # engines with mocks etc.
+ _initialize = kwargs.pop("_initialize", True)
+ _wrap_do_on_connect = kwargs.pop("_wrap_do_on_connect", None)
+
+ # all kwargs should be consumed
+ if kwargs:
+ raise TypeError(
+ "Invalid argument(s) %s sent to create_engine(), "
+ "using configuration %s/%s/%s. Please check that the "
+ "keyword arguments are appropriate for this combination "
+ "of components."
+ % (
+ ",".join("'%s'" % k for k in kwargs),
+ dialect.__class__.__name__,
+ pool.__class__.__name__,
+ engineclass.__name__,
+ )
+ )
+
+ engine = engineclass(pool, dialect, u, **engine_args)
+
+ if _initialize:
+ do_on_connect = dialect.on_connect_url(u)
+ if do_on_connect:
+ if _wrap_do_on_connect:
+ do_on_connect = _wrap_do_on_connect(do_on_connect)
+
+ def on_connect(dbapi_connection, connection_record):
+ do_on_connect(dbapi_connection)
+
+ event.listen(pool, "connect", on_connect)
+
+ def first_connect(dbapi_connection, connection_record):
+ c = base.Connection(
+ engine,
+ connection=dbapi_connection,
+ _has_events=False,
+ # reconnecting will be a reentrant condition, so if the
+ # connection goes away, Connection is then closed
+ _allow_revalidate=False,
+ )
+ c._execution_options = util.EMPTY_DICT
+
+ try:
+ dialect.initialize(c)
+ finally:
+ # note that "invalidated" and "closed" are mutually
+ # exclusive in 1.4 Connection.
+ if not c.invalidated and not c.closed:
+ # transaction is rolled back otherwise, tested by
+ # test/dialect/postgresql/test_dialect.py
+ # ::MiscBackendTest::test_initial_transaction_state
+ dialect.do_rollback(c.connection)
+
+ # previously, the "first_connect" event was used here, which was then
+ # scaled back if the "on_connect" handler were present. now,
+ # since "on_connect" is virtually always present, just use
+ # "connect" event with once_unless_exception in all cases so that
+ # the connection event flow is consistent in all cases.
+ event.listen(
+ pool, "connect", first_connect, _once_unless_exception=True
+ )
+
+ dialect_cls.engine_created(engine)
+ if entrypoint is not dialect_cls:
+ entrypoint.engine_created(engine)
+
+ for plugin in plugins:
+ plugin.engine_created(engine)
+
+ return engine
+
+
+def engine_from_config(configuration, prefix="sqlalchemy.", **kwargs):
+ """Create a new Engine instance using a configuration dictionary.
+
+ The dictionary is typically produced from a config file.
+
+ The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
+ ``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
+ indicates the prefix to be searched for. Each matching key (after the
+ prefix is stripped) is treated as though it were the corresponding keyword
+ argument to a :func:`_sa.create_engine` call.
+
+ The only required key is (assuming the default prefix) ``sqlalchemy.url``,
+ which provides the :ref:`database URL <database_urls>`.
+
+ A select set of keyword arguments will be "coerced" to their
+ expected type based on string values. The set of arguments
+ is extensible per-dialect using the ``engine_config_types`` accessor.
+
+ :param configuration: A dictionary (typically produced from a config file,
+ but this is not a requirement). Items whose keys start with the value
+ of 'prefix' will have that prefix stripped, and will then be passed to
+ :func:`_sa.create_engine`.
+
+ :param prefix: Prefix to match and then strip from keys
+ in 'configuration'.
+
+ :param kwargs: Each keyword argument to ``engine_from_config()`` itself
+ overrides the corresponding item taken from the 'configuration'
+ dictionary. Keyword arguments should *not* be prefixed.
+
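+ For example, a minimal sketch; the dictionary shown is illustrative
+ rather than read from an actual config file::
+
+ from sqlalchemy import engine_from_config
+
+ config = {
+ "sqlalchemy.url": "sqlite://",
+ "sqlalchemy.echo": "true",
+ }
+ engine = engine_from_config(config)
+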
+ """
+
+ options = dict(
+ (key[len(prefix) :], configuration[key])
+ for key in configuration
+ if key.startswith(prefix)
+ )
+ options["_coerce_config"] = True
+ options.update(kwargs)
+ url = options.pop("url")
+ return create_engine(url, **options)
diff --git a/lib/sqlalchemy/engine/cursor.py b/lib/sqlalchemy/engine/cursor.py
new file mode 100644
index 0000000..774916d
--- /dev/null
+++ b/lib/sqlalchemy/engine/cursor.py
@@ -0,0 +1,1942 @@
+# engine/cursor.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Define cursor-specific result set constructs including
+:class:`.BaseCursorResult`, :class:`.CursorResult`."""
+
+
+import collections
+import functools
+
+from .result import Result
+from .result import ResultMetaData
+from .result import SimpleResultMetaData
+from .result import tuplegetter
+from .row import LegacyRow
+from .. import exc
+from .. import util
+from ..sql import expression
+from ..sql import sqltypes
+from ..sql import util as sql_util
+from ..sql.base import _generative
+from ..sql.compiler import RM_NAME
+from ..sql.compiler import RM_OBJECTS
+from ..sql.compiler import RM_RENDERED_NAME
+from ..sql.compiler import RM_TYPE
+
+_UNPICKLED = util.symbol("unpickled")
+
+
+# metadata entry tuple indexes.
+# using raw tuple is faster than namedtuple.
+MD_INDEX = 0 # integer index in cursor.description
+MD_RESULT_MAP_INDEX = 1 # integer index in compiled._result_columns
+MD_OBJECTS = 2 # other string keys and ColumnElement obj that can match
+MD_LOOKUP_KEY = 3 # string key we usually expect for key-based lookup
+MD_RENDERED_NAME = 4 # name that is usually in cursor.description
+MD_PROCESSOR = 5 # callable to process a result value into a row
+MD_UNTRANSLATED = 6 # raw name from cursor.description
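+
+# As a purely hypothetical illustration, a metadata entry for a column
+# "user_id" in the first cursor position might look like:
+# (0, 0, (users.c.user_id,), "user_id", "user_id", <int processor>, None)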
+
+
+class CursorResultMetaData(ResultMetaData):
+ """Result metadata for DBAPI cursors."""
+
+ __slots__ = (
+ "_keymap",
+ "case_sensitive",
+ "_processors",
+ "_keys",
+ "_keymap_by_result_column_idx",
+ "_tuplefilter",
+ "_translated_indexes",
+ "_safe_for_cache"
+ # don't need _unique_filters support here for now. Can be added
+ # if a need arises.
+ )
+
+ returns_rows = True
+
+ def _has_key(self, key):
+ return key in self._keymap
+
+ def _for_freeze(self):
+ return SimpleResultMetaData(
+ self._keys,
+ extra=[self._keymap[key][MD_OBJECTS] for key in self._keys],
+ )
+
+ def _reduce(self, keys):
+ recs = list(self._metadata_for_keys(keys))
+
+ indexes = [rec[MD_INDEX] for rec in recs]
+ new_keys = [rec[MD_LOOKUP_KEY] for rec in recs]
+
+ if self._translated_indexes:
+ indexes = [self._translated_indexes[idx] for idx in indexes]
+
+ tup = tuplegetter(*indexes)
+
+ new_metadata = self.__class__.__new__(self.__class__)
+ new_metadata.case_sensitive = self.case_sensitive
+ new_metadata._processors = self._processors
+ new_metadata._keys = new_keys
+ new_metadata._tuplefilter = tup
+ new_metadata._translated_indexes = indexes
+
+ new_recs = [
+ (index,) + rec[1:]
+ for index, rec in enumerate(self._metadata_for_keys(keys))
+ ]
+ new_metadata._keymap = {rec[MD_LOOKUP_KEY]: rec for rec in new_recs}
+
+ # TODO: need unit test for:
+ # result = connection.execute("raw sql, no columns").scalars()
+ # without the "or ()" it's failing because MD_OBJECTS is None
+ new_metadata._keymap.update(
+ {
+ e: new_rec
+ for new_rec in new_recs
+ for e in new_rec[MD_OBJECTS] or ()
+ }
+ )
+
+ return new_metadata
+
+ def _adapt_to_context(self, context):
+ """When using a cached Compiled construct that has a _result_map,
+ for a new statement that used the cached Compiled, we need to ensure
+ the keymap has the Column objects from our new statement as keys.
+ So here we rewrite keymap with new entries for the new columns
+ as matched to those of the cached statement.
+
+ """
+
+ if not context.compiled._result_columns:
+ return self
+
+ compiled_statement = context.compiled.statement
+ invoked_statement = context.invoked_statement
+
+ if compiled_statement is invoked_statement:
+ return self
+
+ # make a copy and add the columns from the invoked statement
+ # to the result map.
+ md = self.__class__.__new__(self.__class__)
+
+ md._keymap = dict(self._keymap)
+
+ keymap_by_position = self._keymap_by_result_column_idx
+
+ for idx, new in enumerate(invoked_statement._all_selected_columns):
+ try:
+ rec = keymap_by_position[idx]
+ except KeyError:
+ # this can happen when there are bogus column entries
+ # in a TextualSelect
+ pass
+ else:
+ md._keymap[new] = rec
+
+ md.case_sensitive = self.case_sensitive
+ md._processors = self._processors
+ assert not self._tuplefilter
+ md._tuplefilter = None
+ md._translated_indexes = None
+ md._keys = self._keys
+ md._keymap_by_result_column_idx = self._keymap_by_result_column_idx
+ md._safe_for_cache = self._safe_for_cache
+ return md
+
+ def __init__(self, parent, cursor_description):
+ context = parent.context
+ dialect = context.dialect
+ self._tuplefilter = None
+ self._translated_indexes = None
+ self.case_sensitive = dialect.case_sensitive
+ self._safe_for_cache = False
+
+ if context.result_column_struct:
+ (
+ result_columns,
+ cols_are_ordered,
+ textual_ordered,
+ loose_column_name_matching,
+ ) = context.result_column_struct
+ num_ctx_cols = len(result_columns)
+ else:
+ result_columns = (
+ cols_are_ordered
+ ) = (
+ num_ctx_cols
+ ) = loose_column_name_matching = textual_ordered = False
+
+ # merge cursor.description with the column info
+ # present in the compiled structure, if any
+ raw = self._merge_cursor_description(
+ context,
+ cursor_description,
+ result_columns,
+ num_ctx_cols,
+ cols_are_ordered,
+ textual_ordered,
+ loose_column_name_matching,
+ )
+
+ self._keymap = {}
+
+ # processors in key order for certain per-row
+ # views like __iter__ and slices
+ self._processors = [
+ metadata_entry[MD_PROCESSOR] for metadata_entry in raw
+ ]
+
+ if context.compiled:
+ self._keymap_by_result_column_idx = {
+ metadata_entry[MD_RESULT_MAP_INDEX]: metadata_entry
+ for metadata_entry in raw
+ }
+
+ # keymap by primary string...
+ by_key = dict(
+ [
+ (metadata_entry[MD_LOOKUP_KEY], metadata_entry)
+ for metadata_entry in raw
+ ]
+ )
+
+ # for compiled SQL constructs, copy additional lookup keys into
+ # the key lookup map, such as Column objects, labels,
+ # column keys and other names
+ if num_ctx_cols:
+
+ # if by-primary-string dictionary smaller (or bigger?!) than
+ # number of columns, assume we have dupes, rewrite
+ # dupe records with "None" for index which results in
+ # ambiguous column exception when accessed.
+ if len(by_key) != num_ctx_cols:
+ # new in 1.4: get the complete set of all possible keys,
+ # strings, objects, whatever, that are dupes across two
+ # different records, first.
+ index_by_key = {}
+ dupes = set()
+ for metadata_entry in raw:
+ for key in (metadata_entry[MD_RENDERED_NAME],) + (
+ metadata_entry[MD_OBJECTS] or ()
+ ):
+ if not self.case_sensitive and isinstance(
+ key, util.string_types
+ ):
+ key = key.lower()
+ idx = metadata_entry[MD_INDEX]
+ # if this key has been associated with more than one
+ # positional index, it's a dupe
+ if index_by_key.setdefault(key, idx) != idx:
+ dupes.add(key)
+
+ # then put everything we have into the keymap excluding only
+ # those keys that are dupes.
+ self._keymap.update(
+ [
+ (obj_elem, metadata_entry)
+ for metadata_entry in raw
+ if metadata_entry[MD_OBJECTS]
+ for obj_elem in metadata_entry[MD_OBJECTS]
+ if obj_elem not in dupes
+ ]
+ )
+
+ # then for the dupe keys, put the "ambiguous column"
+ # record into by_key.
+ by_key.update({key: (None, None, (), key) for key in dupes})
+
+ else:
+ # no dupes - copy secondary elements from compiled
+ # columns into self._keymap
+ self._keymap.update(
+ [
+ (obj_elem, metadata_entry)
+ for metadata_entry in raw
+ if metadata_entry[MD_OBJECTS]
+ for obj_elem in metadata_entry[MD_OBJECTS]
+ ]
+ )
+
+ # update keymap with primary string names taking
+ # precedence
+ self._keymap.update(by_key)
+
+ # update keymap with "translated" names (sqlite-only thing)
+ if not num_ctx_cols and context._translate_colname:
+ self._keymap.update(
+ [
+ (
+ metadata_entry[MD_UNTRANSLATED],
+ self._keymap[metadata_entry[MD_LOOKUP_KEY]],
+ )
+ for metadata_entry in raw
+ if metadata_entry[MD_UNTRANSLATED]
+ ]
+ )
+
+ def _merge_cursor_description(
+ self,
+ context,
+ cursor_description,
+ result_columns,
+ num_ctx_cols,
+ cols_are_ordered,
+ textual_ordered,
+ loose_column_name_matching,
+ ):
+ """Merge a cursor.description with compiled result column information.
+
+ There are at least four separate strategies used here, selected
+ depending on the type of SQL construct used to start with.
+
+ The most common case is that of the compiled SQL expression construct,
+ which generated the column names present in the raw SQL string and
+ which has the identical number of columns as were reported by
+ cursor.description. In this case, we assume a 1-1 positional mapping
+ between the entries in cursor.description and the compiled object.
+ This is also the most performant case as we disregard extracting /
+ decoding the column names present in cursor.description since we
+ already have the desired name we generated in the compiled SQL
+ construct.
+
+ The next common case is that of the completely raw string SQL,
+ such as passed to connection.execute(). In this case we have no
+ compiled construct to work with, so we extract and decode the
+ names from cursor.description and index those as the primary
+ result row target keys.
+
+ The remaining fairly common case is that of the textual SQL
+ that includes at least partial column information; this is when
+ we use a :class:`_expression.TextualSelect` construct.
+ This construct may have
+ unordered or ordered column information. In the ordered case, we
+ merge the cursor.description and the compiled construct's information
+ positionally, and warn if there are additional description names
+ present, however we still decode the names in cursor.description
+ as we don't have a guarantee that the names in the columns match
+ on these. In the unordered case, we match names in cursor.description
+ to that of the compiled construct based on name matching.
+ In both of these cases, the cursor.description names and the column
+ expression objects and names are indexed as result row target keys.
+
+ The final case is much less common, where we have a compiled
+ non-textual SQL expression construct, but the number of columns
+ in cursor.description doesn't match what's in the compiled
+ construct. We make the guess here that there might be textual
+ column expressions in the compiled construct that themselves include
+ a comma in them causing them to split. We do the same name-matching
+ as with textual non-ordered columns.
+
+ The name-matched system of merging is the same as that used by
+ SQLAlchemy for all cases up through the 0.9 series. Positional
+ matching for compiled SQL expressions was introduced in 1.0 as a
+ major performance feature, and positional matching for textual
+ :class:`_expression.TextualSelect` objects in 1.1.
+ As name matching is no longer
+ a common case, it was acceptable to factor it into smaller generator-
+ oriented methods that are easier to understand, but incur slightly
+ more performance overhead.
+
+ """
+
+ case_sensitive = context.dialect.case_sensitive
+
+ if (
+ num_ctx_cols
+ and cols_are_ordered
+ and not textual_ordered
+ and num_ctx_cols == len(cursor_description)
+ ):
+ self._keys = [elem[0] for elem in result_columns]
+ # pure positional 1-1 case; doesn't need to read
+ # the names from cursor.description
+
+ # this metadata is safe to cache because we are guaranteed
+ # to have the columns in the same order for new executions
+ self._safe_for_cache = True
+ return [
+ (
+ idx,
+ idx,
+ rmap_entry[RM_OBJECTS],
+ rmap_entry[RM_NAME].lower()
+ if not case_sensitive
+ else rmap_entry[RM_NAME],
+ rmap_entry[RM_RENDERED_NAME],
+ context.get_result_processor(
+ rmap_entry[RM_TYPE],
+ rmap_entry[RM_RENDERED_NAME],
+ cursor_description[idx][1],
+ ),
+ None,
+ )
+ for idx, rmap_entry in enumerate(result_columns)
+ ]
+ else:
+
+ # name-based or text-positional cases, where we need
+ # to read cursor.description names
+
+ if textual_ordered:
+ self._safe_for_cache = True
+ # textual positional case
+ raw_iterator = self._merge_textual_cols_by_position(
+ context, cursor_description, result_columns
+ )
+ elif num_ctx_cols:
+ # compiled SQL with a mismatch of description cols
+ # vs. compiled cols, or textual w/ unordered columns
+ # the order of columns can change if the query is
+ # against a "select *", so not safe to cache
+ self._safe_for_cache = False
+ raw_iterator = self._merge_cols_by_name(
+ context,
+ cursor_description,
+ result_columns,
+ loose_column_name_matching,
+ )
+ else:
+ # no compiled SQL, just a raw string, order of columns
+ # can change for "select *"
+ self._safe_for_cache = False
+ raw_iterator = self._merge_cols_by_none(
+ context, cursor_description
+ )
+
+ return [
+ (
+ idx,
+ ridx,
+ obj,
+ cursor_colname,
+ cursor_colname,
+ context.get_result_processor(
+ mapped_type, cursor_colname, coltype
+ ),
+ untranslated,
+ )
+ for (
+ idx,
+ ridx,
+ cursor_colname,
+ mapped_type,
+ coltype,
+ obj,
+ untranslated,
+ ) in raw_iterator
+ ]
+
+ def _colnames_from_description(self, context, cursor_description):
+ """Extract column names and data types from a cursor.description.
+
+ Applies unicode decoding, column translation, "normalization",
+ and case sensitivity rules to the names based on the dialect.
+
+ """
+
+ dialect = context.dialect
+ case_sensitive = dialect.case_sensitive
+ translate_colname = context._translate_colname
+ description_decoder = (
+ dialect._description_decoder
+ if dialect.description_encoding
+ else None
+ )
+ normalize_name = (
+ dialect.normalize_name if dialect.requires_name_normalize else None
+ )
+ untranslated = None
+
+ self._keys = []
+
+ for idx, rec in enumerate(cursor_description):
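+ # each cursor.description entry is a PEP 249 7-item sequence;
+ # only the name (rec[0]) and type_code (rec[1]) fields are read here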
+ colname = rec[0]
+ coltype = rec[1]
+
+ if description_decoder:
+ colname = description_decoder(colname)
+
+ if translate_colname:
+ colname, untranslated = translate_colname(colname)
+
+ if normalize_name:
+ colname = normalize_name(colname)
+
+ self._keys.append(colname)
+ if not case_sensitive:
+ colname = colname.lower()
+
+ yield idx, colname, untranslated, coltype
+
+ def _merge_textual_cols_by_position(
+ self, context, cursor_description, result_columns
+ ):
+ num_ctx_cols = len(result_columns) if result_columns else None
+
+ if num_ctx_cols > len(cursor_description):
+ util.warn(
+ "Number of columns in textual SQL (%d) is "
+ "smaller than number of columns requested (%d)"
+ % (num_ctx_cols, len(cursor_description))
+ )
+ seen = set()
+ for (
+ idx,
+ colname,
+ untranslated,
+ coltype,
+ ) in self._colnames_from_description(context, cursor_description):
+ if idx < num_ctx_cols:
+ ctx_rec = result_columns[idx]
+ obj = ctx_rec[RM_OBJECTS]
+ ridx = idx
+ mapped_type = ctx_rec[RM_TYPE]
+ if obj[0] in seen:
+ raise exc.InvalidRequestError(
+ "Duplicate column expression requested "
+ "in textual SQL: %r" % obj[0]
+ )
+ seen.add(obj[0])
+ else:
+ mapped_type = sqltypes.NULLTYPE
+ obj = None
+ ridx = None
+ yield idx, ridx, colname, mapped_type, coltype, obj, untranslated
+
+ def _merge_cols_by_name(
+ self,
+ context,
+ cursor_description,
+ result_columns,
+ loose_column_name_matching,
+ ):
+ dialect = context.dialect
+ case_sensitive = dialect.case_sensitive
+ match_map = self._create_description_match_map(
+ result_columns, case_sensitive, loose_column_name_matching
+ )
+ for (
+ idx,
+ colname,
+ untranslated,
+ coltype,
+ ) in self._colnames_from_description(context, cursor_description):
+ try:
+ ctx_rec = match_map[colname]
+ except KeyError:
+ mapped_type = sqltypes.NULLTYPE
+ obj = None
+ result_columns_idx = None
+ else:
+ obj = ctx_rec[1]
+ mapped_type = ctx_rec[2]
+ result_columns_idx = ctx_rec[3]
+ yield (
+ idx,
+ result_columns_idx,
+ colname,
+ mapped_type,
+ coltype,
+ obj,
+ untranslated,
+ )
+
+ @classmethod
+ def _create_description_match_map(
+ cls,
+ result_columns,
+ case_sensitive=True,
+ loose_column_name_matching=False,
+ ):
+ """when matching cursor.description to a set of names that are present
+ in a Compiled object, as is the case with TextualSelect, get all the
+ names we expect might match those in cursor.description.
+ """
+
+ d = {}
+ for ridx, elem in enumerate(result_columns):
+ key = elem[RM_RENDERED_NAME]
+
+ if not case_sensitive:
+ key = key.lower()
+ if key in d:
+ # conflicting keyname - just add the column-linked objects
+ # to the existing record. if there is a duplicate column
+ # name in the cursor description, this will allow all of those
+ # objects to raise an ambiguous column error
+ e_name, e_obj, e_type, e_ridx = d[key]
+ d[key] = e_name, e_obj + elem[RM_OBJECTS], e_type, ridx
+ else:
+ d[key] = (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE], ridx)
+
+ if loose_column_name_matching:
+ # when using a textual statement with an unordered set
+ # of columns that line up, we are expecting the user
+ # to be using label names in the SQL that match to the column
+ # expressions. Enable more liberal matching for this case;
+ # duplicate keys that are ambiguous will be fixed later.
+ for r_key in elem[RM_OBJECTS]:
+ d.setdefault(
+ r_key,
+ (elem[RM_NAME], elem[RM_OBJECTS], elem[RM_TYPE], ridx),
+ )
+
+ return d
+
+ def _merge_cols_by_none(self, context, cursor_description):
+ for (
+ idx,
+ colname,
+ untranslated,
+ coltype,
+ ) in self._colnames_from_description(context, cursor_description):
+ yield (
+ idx,
+ None,
+ colname,
+ sqltypes.NULLTYPE,
+ coltype,
+ None,
+ untranslated,
+ )
+
+ def _key_fallback(self, key, err, raiseerr=True):
+ if raiseerr:
+ util.raise_(
+ exc.NoSuchColumnError(
+ "Could not locate column in row for column '%s'"
+ % util.string_or_unprintable(key)
+ ),
+ replace_context=err,
+ )
+ else:
+ return None
+
+ def _raise_for_ambiguous_column_name(self, rec):
+ raise exc.InvalidRequestError(
+ "Ambiguous column name '%s' in "
+ "result set column descriptions" % rec[MD_LOOKUP_KEY]
+ )
+
+ def _index_for_key(self, key, raiseerr=True):
+ # TODO: can consider pre-loading ints and negative ints
+ # into _keymap - also no coverage here
+ if isinstance(key, int):
+ key = self._keys[key]
+
+ try:
+ rec = self._keymap[key]
+ except KeyError as ke:
+ rec = self._key_fallback(key, ke, raiseerr)
+ if rec is None:
+ return None
+
+ index = rec[0]
+
+ if index is None:
+ self._raise_for_ambiguous_column_name(rec)
+ return index
+
+ def _indexes_for_keys(self, keys):
+
+ try:
+ return [self._keymap[key][0] for key in keys]
+ except KeyError as ke:
+ # ensure it raises
+ CursorResultMetaData._key_fallback(self, ke.args[0], ke)
+
+ def _metadata_for_keys(self, keys):
+ for key in keys:
+ if int in key.__class__.__mro__:
+ key = self._keys[key]
+
+ try:
+ rec = self._keymap[key]
+ except KeyError as ke:
+ # ensure it raises
+ CursorResultMetaData._key_fallback(self, ke.args[0], ke)
+
+ index = rec[0]
+
+ if index is None:
+ self._raise_for_ambiguous_column_name(rec)
+
+ yield rec
+
+ def __getstate__(self):
+ return {
+ "_keymap": {
+ key: (rec[MD_INDEX], rec[MD_RESULT_MAP_INDEX], _UNPICKLED, key)
+ for key, rec in self._keymap.items()
+ if isinstance(key, util.string_types + util.int_types)
+ },
+ "_keys": self._keys,
+ "case_sensitive": self.case_sensitive,
+ "_translated_indexes": self._translated_indexes,
+ "_tuplefilter": self._tuplefilter,
+ }
+
+ def __setstate__(self, state):
+ self._processors = [None for _ in range(len(state["_keys"]))]
+ self._keymap = state["_keymap"]
+
+ self._keymap_by_result_column_idx = {
+ rec[MD_RESULT_MAP_INDEX]: rec for rec in self._keymap.values()
+ }
+ self._keys = state["_keys"]
+ self.case_sensitive = state["case_sensitive"]
+
+ if state["_translated_indexes"]:
+ self._translated_indexes = state["_translated_indexes"]
+ self._tuplefilter = tuplegetter(*self._translated_indexes)
+ else:
+ self._translated_indexes = self._tuplefilter = None
+
+
+class LegacyCursorResultMetaData(CursorResultMetaData):
+ __slots__ = ()
+
+ def _contains(self, value, row):
+ key = value
+ if key in self._keymap:
+ util.warn_deprecated_20(
+ "Using the 'in' operator to test for string or column "
+ "keys, or integer indexes, in a :class:`.Row` object is "
+ "deprecated and will "
+ "be removed in a future release. "
+ "Use the `Row._fields` or `Row._mapping` attribute, i.e. "
+ "'key in row._fields'",
+ )
+ return True
+ else:
+ return self._key_fallback(key, None, False) is not None
+
+ def _key_fallback(self, key, err, raiseerr=True):
+ map_ = self._keymap
+ result = None
+
+ if isinstance(key, util.string_types):
+ result = map_.get(key if self.case_sensitive else key.lower())
+ elif isinstance(key, expression.ColumnElement):
+ if (
+ key._tq_label
+ and (
+ key._tq_label
+ if self.case_sensitive
+ else key._tq_label.lower()
+ )
+ in map_
+ ):
+ result = map_[
+ key._tq_label
+ if self.case_sensitive
+ else key._tq_label.lower()
+ ]
+ elif (
+ hasattr(key, "name")
+ and (key.name if self.case_sensitive else key.name.lower())
+ in map_
+ ):
+ # match is only on name.
+ result = map_[
+ key.name if self.case_sensitive else key.name.lower()
+ ]
+
+ # search extra hard to make sure this
+ # isn't a column/label name overlap.
+ # this check isn't currently available if the row
+ # was unpickled.
+ if result is not None and result[MD_OBJECTS] not in (
+ None,
+ _UNPICKLED,
+ ):
+ for obj in result[MD_OBJECTS]:
+ if key._compare_name_for_result(obj):
+ break
+ else:
+ result = None
+ if result is not None:
+ if result[MD_OBJECTS] is _UNPICKLED:
+ util.warn_deprecated(
+ "Retrieving row values using Column objects from a "
+ "row that was unpickled is deprecated; adequate "
+ "state cannot be pickled for this to be efficient. "
+ "This usage will raise KeyError in a future release.",
+ version="1.4",
+ )
+ else:
+ util.warn_deprecated(
+ "Retrieving row values using Column objects with only "
+ "matching names as keys is deprecated, and will raise "
+ "KeyError in a future release; only Column "
+ "objects that are explicitly part of the statement "
+ "object should be used.",
+ version="1.4",
+ )
+ if result is None:
+ if raiseerr:
+ util.raise_(
+ exc.NoSuchColumnError(
+ "Could not locate column in row for column '%s'"
+ % util.string_or_unprintable(key)
+ ),
+ replace_context=err,
+ )
+ else:
+ return None
+ else:
+ map_[key] = result
+ return result
+
+ def _warn_for_nonint(self, key):
+ util.warn_deprecated_20(
+ "Using non-integer/slice indices on Row is deprecated and will "
+ "be removed in version 2.0; please use row._mapping[<key>], or "
+ "the mappings() accessor on the Result object.",
+ stacklevel=4,
+ )
+
+ def _has_key(self, key):
+ if key in self._keymap:
+ return True
+ else:
+ return self._key_fallback(key, None, False) is not None
+
+
+class ResultFetchStrategy(object):
+ """Define a fetching strategy for a result object.
+
+
+ .. versionadded:: 1.4
+
+ """
+
+ __slots__ = ()
+
+ alternate_cursor_description = None
+
+ def soft_close(self, result, dbapi_cursor):
+ raise NotImplementedError()
+
+ def hard_close(self, result, dbapi_cursor):
+ raise NotImplementedError()
+
+ def yield_per(self, result, dbapi_cursor, num):
+ return
+
+ def fetchone(self, result, dbapi_cursor, hard_close=False):
+ raise NotImplementedError()
+
+ def fetchmany(self, result, dbapi_cursor, size=None):
+ raise NotImplementedError()
+
+ def fetchall(self, result, dbapi_cursor):
+ raise NotImplementedError()
+
+ def handle_exception(self, result, dbapi_cursor, err):
+ raise err
+
+
+class NoCursorFetchStrategy(ResultFetchStrategy):
+ """Cursor strategy for a result that has no open cursor.
+
+ There are two varieties of this strategy, one for DQL and one for
+ DML (and also DDL), each of which represents a result that had a cursor
+ but no longer has one.
+
+ """
+
+ __slots__ = ()
+
+ def soft_close(self, result, dbapi_cursor):
+ pass
+
+ def hard_close(self, result, dbapi_cursor):
+ pass
+
+ def fetchone(self, result, dbapi_cursor, hard_close=False):
+ return self._non_result(result, None)
+
+ def fetchmany(self, result, dbapi_cursor, size=None):
+ return self._non_result(result, [])
+
+ def fetchall(self, result, dbapi_cursor):
+ return self._non_result(result, [])
+
+ def _non_result(self, result, default, err=None):
+ raise NotImplementedError()
+
+
+class NoCursorDQLFetchStrategy(NoCursorFetchStrategy):
+ """Cursor strategy for a DQL result that has no open cursor.
+
+ This is a result set that can return rows, i.e. for a SELECT, or for an
+ INSERT, UPDATE, DELETE that includes RETURNING. However it is in the state
+ where the cursor is closed and no rows remain available. The owning result
+ object may or may not be "hard closed", which determines if the fetch
+ methods send empty results or raise for closed result.
+
+ """
+
+ __slots__ = ()
+
+ def _non_result(self, result, default, err=None):
+ if result.closed:
+ util.raise_(
+ exc.ResourceClosedError("This result object is closed."),
+ replace_context=err,
+ )
+ else:
+ return default
+
+
+_NO_CURSOR_DQL = NoCursorDQLFetchStrategy()
+
+
+class NoCursorDMLFetchStrategy(NoCursorFetchStrategy):
+ """Cursor strategy for a DML result that has no open cursor.
+
+ This is a result set that does not return rows, i.e. for an INSERT,
+ UPDATE, DELETE that does not include RETURNING.
+
+ """
+
+ __slots__ = ()
+
+ def _non_result(self, result, default, err=None):
+ # we only expect to have a _NoResultMetaData() here right now.
+ assert not result._metadata.returns_rows
+ result._metadata._we_dont_return_rows(err)
+
+
+_NO_CURSOR_DML = NoCursorDMLFetchStrategy()
+
+
+class CursorFetchStrategy(ResultFetchStrategy):
+ """Call fetch methods from a DBAPI cursor.
+
+ Alternate versions of this class may instead buffer the rows from
+ cursors or not use cursors at all.
+
+ """
+
+ __slots__ = ()
+
+ def soft_close(self, result, dbapi_cursor):
+ result.cursor_strategy = _NO_CURSOR_DQL
+
+ def hard_close(self, result, dbapi_cursor):
+ result.cursor_strategy = _NO_CURSOR_DQL
+
+ def handle_exception(self, result, dbapi_cursor, err):
+ result.connection._handle_dbapi_exception(
+ err, None, None, dbapi_cursor, result.context
+ )
+
+ def yield_per(self, result, dbapi_cursor, num):
+ result.cursor_strategy = BufferedRowCursorFetchStrategy(
+ dbapi_cursor,
+ {"max_row_buffer": num},
+ initial_buffer=collections.deque(),
+ growth_factor=0,
+ )
+
+ def fetchone(self, result, dbapi_cursor, hard_close=False):
+ try:
+ row = dbapi_cursor.fetchone()
+ if row is None:
+ result._soft_close(hard=hard_close)
+ return row
+ except BaseException as e:
+ self.handle_exception(result, dbapi_cursor, e)
+
+ def fetchmany(self, result, dbapi_cursor, size=None):
+ try:
+ if size is None:
+ rows = dbapi_cursor.fetchmany()
+ else:
+ rows = dbapi_cursor.fetchmany(size)
+
+ if not rows:
+ result._soft_close()
+ return rows
+ except BaseException as e:
+ self.handle_exception(result, dbapi_cursor, e)
+
+ def fetchall(self, result, dbapi_cursor):
+ try:
+ rows = dbapi_cursor.fetchall()
+ result._soft_close()
+ return rows
+ except BaseException as e:
+ self.handle_exception(result, dbapi_cursor, e)
+
+
+_DEFAULT_FETCH = CursorFetchStrategy()
+
+
+class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
+ """A cursor fetch strategy with row buffering behavior.
+
+ This strategy buffers the contents of a selection of rows
+ before ``fetchone()`` is called. This is to allow the results of
+ ``cursor.description`` to be available immediately, when
+ interfacing with a DB-API that requires rows to be consumed before
+ this information is available (currently psycopg2, when used with
+ server-side cursors).
+
+ The pre-fetching behavior fetches only one row initially, and then
+ grows its buffer size by a fixed amount with each successive need
+ for additional rows up to the ``max_row_buffer`` size, which defaults
+ to 1000::
+
+ with psycopg2_engine.connect() as conn:
+
+ result = conn.execution_options(
+ stream_results=True, max_row_buffer=50
+ ).execute(text("select * from table"))
+
+ .. versionadded:: 1.4 ``max_row_buffer`` may now exceed 1000 rows.
+
+ .. seealso::
+
+ :ref:`psycopg2_execution_options`
+ """
+
+ __slots__ = ("_max_row_buffer", "_rowbuffer", "_bufsize", "_growth_factor")
+
+ def __init__(
+ self,
+ dbapi_cursor,
+ execution_options,
+ growth_factor=5,
+ initial_buffer=None,
+ ):
+ self._max_row_buffer = execution_options.get("max_row_buffer", 1000)
+
+ if initial_buffer is not None:
+ self._rowbuffer = initial_buffer
+ else:
+ self._rowbuffer = collections.deque(dbapi_cursor.fetchmany(1))
+ self._growth_factor = growth_factor
+
+ if growth_factor:
+ self._bufsize = min(self._max_row_buffer, self._growth_factor)
+ else:
+ self._bufsize = self._max_row_buffer
+
+ @classmethod
+ def create(cls, result):
+ return BufferedRowCursorFetchStrategy(
+ result.cursor,
+ result.context.execution_options,
+ )
+
+ def _buffer_rows(self, result, dbapi_cursor):
+ """this is currently used only by fetchone()."""
+
+ size = self._bufsize
+ try:
+ if size < 1:
+ new_rows = dbapi_cursor.fetchall()
+ else:
+ new_rows = dbapi_cursor.fetchmany(size)
+ except BaseException as e:
+ self.handle_exception(result, dbapi_cursor, e)
+
+ if not new_rows:
+ return
+ self._rowbuffer = collections.deque(new_rows)
+ if self._growth_factor and size < self._max_row_buffer:
+ self._bufsize = min(
+ self._max_row_buffer, size * self._growth_factor
+ )
+
+ def yield_per(self, result, dbapi_cursor, num):
+ self._growth_factor = 0
+ self._max_row_buffer = self._bufsize = num
+
+ def soft_close(self, result, dbapi_cursor):
+ self._rowbuffer.clear()
+ super(BufferedRowCursorFetchStrategy, self).soft_close(
+ result, dbapi_cursor
+ )
+
+ def hard_close(self, result, dbapi_cursor):
+ self._rowbuffer.clear()
+ super(BufferedRowCursorFetchStrategy, self).hard_close(
+ result, dbapi_cursor
+ )
+
+ def fetchone(self, result, dbapi_cursor, hard_close=False):
+ if not self._rowbuffer:
+ self._buffer_rows(result, dbapi_cursor)
+ if not self._rowbuffer:
+ try:
+ result._soft_close(hard=hard_close)
+ except BaseException as e:
+ self.handle_exception(result, dbapi_cursor, e)
+ return None
+ return self._rowbuffer.popleft()
+
+ def fetchmany(self, result, dbapi_cursor, size=None):
+ if size is None:
+ return self.fetchall(result, dbapi_cursor)
+
+ buf = list(self._rowbuffer)
+ lb = len(buf)
+ if size > lb:
+ try:
+ new = dbapi_cursor.fetchmany(size - lb)
+ except BaseException as e:
+ self.handle_exception(result, dbapi_cursor, e)
+ else:
+ if not new:
+ result._soft_close()
+ else:
+ buf.extend(new)
+
+ rows = buf[0:size]
+ self._rowbuffer = collections.deque(buf[size:])
+ return rows
+
+ def fetchall(self, result, dbapi_cursor):
+ try:
+ ret = list(self._rowbuffer) + list(dbapi_cursor.fetchall())
+ self._rowbuffer.clear()
+ result._soft_close()
+ return ret
+ except BaseException as e:
+ self.handle_exception(result, dbapi_cursor, e)
+
+
+class FullyBufferedCursorFetchStrategy(CursorFetchStrategy):
+ """A cursor strategy that buffers rows fully upon creation.
+
+ Used for operations where a result is to be delivered
+ after the database conversation can not be continued,
+ such as MSSQL INSERT...OUTPUT after an autocommit.
+
+ """
+
+ __slots__ = ("_rowbuffer", "alternate_cursor_description")
+
+ def __init__(
+ self, dbapi_cursor, alternate_description=None, initial_buffer=None
+ ):
+ self.alternate_cursor_description = alternate_description
+ if initial_buffer is not None:
+ self._rowbuffer = collections.deque(initial_buffer)
+ else:
+ self._rowbuffer = collections.deque(dbapi_cursor.fetchall())
+
+ def yield_per(self, result, dbapi_cursor, num):
+ pass
+
+ def soft_close(self, result, dbapi_cursor):
+ self._rowbuffer.clear()
+ super(FullyBufferedCursorFetchStrategy, self).soft_close(
+ result, dbapi_cursor
+ )
+
+ def hard_close(self, result, dbapi_cursor):
+ self._rowbuffer.clear()
+ super(FullyBufferedCursorFetchStrategy, self).hard_close(
+ result, dbapi_cursor
+ )
+
+ def fetchone(self, result, dbapi_cursor, hard_close=False):
+ if self._rowbuffer:
+ return self._rowbuffer.popleft()
+ else:
+ result._soft_close(hard=hard_close)
+ return None
+
+ def fetchmany(self, result, dbapi_cursor, size=None):
+ if size is None:
+ return self.fetchall(result, dbapi_cursor)
+
+ buf = list(self._rowbuffer)
+ rows = buf[0:size]
+ self._rowbuffer = collections.deque(buf[size:])
+ if not rows:
+ result._soft_close()
+ return rows
+
+ def fetchall(self, result, dbapi_cursor):
+ ret = self._rowbuffer
+ self._rowbuffer = collections.deque()
+ result._soft_close()
+ return ret
+
+
+class _NoResultMetaData(ResultMetaData):
+ __slots__ = ()
+
+ returns_rows = False
+
+ def _we_dont_return_rows(self, err=None):
+ util.raise_(
+ exc.ResourceClosedError(
+ "This result object does not return rows. "
+ "It has been closed automatically."
+ ),
+ replace_context=err,
+ )
+
+ def _index_for_key(self, keys, raiseerr):
+ self._we_dont_return_rows()
+
+ def _metadata_for_keys(self, key):
+ self._we_dont_return_rows()
+
+ def _reduce(self, keys):
+ self._we_dont_return_rows()
+
+ @property
+ def _keymap(self):
+ self._we_dont_return_rows()
+
+ @property
+ def keys(self):
+ self._we_dont_return_rows()
+
+
+class _LegacyNoResultMetaData(_NoResultMetaData):
+ @property
+ def keys(self):
+ util.warn_deprecated_20(
+ "Calling the .keys() method on a result set that does not return "
+ "rows is deprecated and will raise ResourceClosedError in "
+ "SQLAlchemy 2.0.",
+ )
+ return []
+
+
+_NO_RESULT_METADATA = _NoResultMetaData()
+_LEGACY_NO_RESULT_METADATA = _LegacyNoResultMetaData()
+
+
+class BaseCursorResult(object):
+ """Base class for database result objects."""
+
+ out_parameters = None
+ _metadata = None
+ _soft_closed = False
+ closed = False
+
+ def __init__(self, context, cursor_strategy, cursor_description):
+ self.context = context
+ self.dialect = context.dialect
+ self.cursor = context.cursor
+ self.cursor_strategy = cursor_strategy
+ self.connection = context.root_connection
+ self._echo = echo = (
+ self.connection._echo and context.engine._should_log_debug()
+ )
+
+ if cursor_description is not None:
+ # inline of Result._row_getter(), set up an initial row
+ # getter assuming no transformations will be called as this
+ # is the most common case
+
+ if echo:
+ log = self.context.connection._log_debug
+
+ def log_row(row):
+ log("Row %r", sql_util._repr_row(row))
+ return row
+
+ self._row_logging_fn = log_row
+ else:
+ log_row = None
+
+ metadata = self._init_metadata(context, cursor_description)
+
+ keymap = metadata._keymap
+ processors = metadata._processors
+ process_row = self._process_row
+ key_style = process_row._default_key_style
+ _make_row = functools.partial(
+ process_row, metadata, processors, keymap, key_style
+ )
+ if log_row:
+
+ def make_row(row):
+ made_row = _make_row(row)
+ log_row(made_row)
+ return made_row
+
+ else:
+ make_row = _make_row
+ self._set_memoized_attribute("_row_getter", make_row)
+
+ else:
+ self._metadata = self._no_result_metadata
+
+ def _init_metadata(self, context, cursor_description):
+
+ if context.compiled:
+ if context.compiled._cached_metadata:
+ metadata = self.context.compiled._cached_metadata
+ else:
+ metadata = self._cursor_metadata(self, cursor_description)
+ if metadata._safe_for_cache:
+ context.compiled._cached_metadata = metadata
+
+ # result rewrite/ adapt step. this is to suit the case
+ # when we are invoked against a cached Compiled object, we want
+ # to rewrite the ResultMetaData to reflect the Column objects
+ # that are in our current SQL statement object, not the one
+ # that is associated with the cached Compiled object.
+ # the Compiled object may also tell us to not
+ # actually do this step; this is to support the ORM where
+ # it is to produce a new Result object in any case, and will
+ # be using the cached Column objects against this database result
+ # so we don't want to rewrite them.
+ #
+ # Basically this step suits the use case where the end user
+ # is using Core SQL expressions and is accessing columns in the
+ # result row using row._mapping[table.c.column].
+ compiled = context.compiled
+ if (
+ compiled
+ and compiled._result_columns
+ and context.cache_hit is context.dialect.CACHE_HIT
+ and not context.execution_options.get(
+ "_result_disable_adapt_to_context", False
+ )
+ and compiled.statement is not context.invoked_statement
+ ):
+ metadata = metadata._adapt_to_context(context)
+
+ self._metadata = metadata
+
+ else:
+ self._metadata = metadata = self._cursor_metadata(
+ self, cursor_description
+ )
+ if self._echo:
+ context.connection._log_debug(
+ "Col %r", tuple(x[0] for x in cursor_description)
+ )
+ return metadata
+
+ def _soft_close(self, hard=False):
+ """Soft close this :class:`_engine.CursorResult`.
+
+ This releases all DBAPI cursor resources, but leaves the
+ CursorResult "open" from a semantic perspective, meaning the
+ fetchXXX() methods will continue to return empty results.
+
+ This method is called automatically when:
+
+ * all result rows are exhausted using the fetchXXX() methods.
+ * cursor.description is None.
+
+ This method is **not public**, but is documented in order to clarify
+ the "autoclose" process used.
+
+ .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`_engine.CursorResult.close`
+
+
+ """
+ if (not hard and self._soft_closed) or (hard and self.closed):
+ return
+
+ if hard:
+ self.closed = True
+ self.cursor_strategy.hard_close(self, self.cursor)
+ else:
+ self.cursor_strategy.soft_close(self, self.cursor)
+
+ if not self._soft_closed:
+ cursor = self.cursor
+ self.cursor = None
+ self.connection._safe_close_cursor(cursor)
+ self._soft_closed = True
+
+ @property
+ def inserted_primary_key_rows(self):
+ """Return the value of
+ :attr:`_engine.CursorResult.inserted_primary_key`
+ as a row contained within a list; some dialects may support a
+ multiple row form as well.
+
+ .. note:: As indicated below, in current SQLAlchemy versions this
+ accessor is only useful beyond what's already supplied by
+ :attr:`_engine.CursorResult.inserted_primary_key` when using the
+ :ref:`postgresql_psycopg2` dialect. Future versions hope to
+ generalize this feature to more dialects.
+
+ This accessor is added to support dialects that offer the feature
+ that is currently implemented by the :ref:`psycopg2_executemany_mode`
+ feature, currently **only the psycopg2 dialect**, which provides
+ for many rows to be INSERTed at once while still retaining the
+ behavior of being able to return server-generated primary key values.
+
+ * **When using the psycopg2 dialect, or other dialects that may support
+ "fast executemany" style inserts in upcoming releases** : When
+ invoking an INSERT statement while passing a list of rows as the
+ second argument to :meth:`_engine.Connection.execute`, this accessor
+ will then provide a list of rows, where each row contains the primary
+ key value for each row that was INSERTed.
+
+ * **When using all other dialects / backends that don't yet support
+ this feature**: This accessor is only useful for **single row INSERT
+ statements**, and returns the same information as that of the
+ :attr:`_engine.CursorResult.inserted_primary_key` within a
+ single-element list. When an INSERT statement is executed in
+ conjunction with a list of rows to be INSERTed, the list will contain
+ one row per row inserted in the statement, however it will contain
+ ``None`` for any server-generated values.
+
+ Future releases of SQLAlchemy will further generalize the
+ "fast execution helper" feature of psycopg2 to suit other dialects,
+ thus allowing this accessor to be of more general use.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`_engine.CursorResult.inserted_primary_key`
+
+ """
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled " "expression construct."
+ )
+ elif not self.context.isinsert:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() " "expression construct."
+ )
+ elif self.context._is_explicit_returning:
+ raise exc.InvalidRequestError(
+ "Can't call inserted_primary_key "
+ "when returning() "
+ "is used."
+ )
+ return self.context.inserted_primary_key_rows
+
+ @property
+ def inserted_primary_key(self):
+ """Return the primary key for the row just inserted.
+
+ The return value is a :class:`_result.Row` object representing
+ a named tuple of primary key values in the order in which the
+ primary key columns are configured in the source
+ :class:`_schema.Table`.
+
+ .. versionchanged:: 1.4.8 - the
+ :attr:`_engine.CursorResult.inserted_primary_key`
+ value is now a named tuple via the :class:`_result.Row` class,
+ rather than a plain tuple.
+
+ This accessor only applies to single row :func:`_expression.insert`
+ constructs which did not explicitly specify
+ :meth:`_expression.Insert.returning`. Support for multirow inserts,
+ while not yet available for most backends, would be accessed using
+ the :attr:`_engine.CursorResult.inserted_primary_key_rows` accessor.
+
+ Note that primary key columns which specify a server_default clause, or
+ otherwise do not qualify as "autoincrement" columns (see the notes at
+ :class:`_schema.Column`), and were generated using the database-side
+        default, will appear in this list as ``None`` unless the backend
+        supports "returning" and the insert statement was executed with
+        "implicit returning" enabled.
+
+ Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() construct.
+
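+        For example, an illustrative sketch (``user_table`` assumed to be a
+        :class:`_schema.Table` with an autoincrementing primary key)::
+
+            result = connection.execute(
+                user_table.insert().values(name="some name")
+            )
+            primary_key = result.inserted_primary_key  # e.g. (1,)
+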
+ """
+
+ if self.context.executemany:
+ raise exc.InvalidRequestError(
+ "This statement was an executemany call; if primary key "
+ "returning is supported, please "
+ "use .inserted_primary_key_rows."
+ )
+
+ ikp = self.inserted_primary_key_rows
+ if ikp:
+ return ikp[0]
+ else:
+ return None
+
+ def last_updated_params(self):
+ """Return the collection of updated parameters from this
+ execution.
+
+ Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an update() construct.
+
+ """
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled " "expression construct."
+ )
+ elif not self.context.isupdate:
+ raise exc.InvalidRequestError(
+ "Statement is not an update() " "expression construct."
+ )
+ elif self.context.executemany:
+ return self.context.compiled_parameters
+ else:
+ return self.context.compiled_parameters[0]
+
+ def last_inserted_params(self):
+ """Return the collection of inserted parameters from this
+ execution.
+
+ Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() construct.
+
+ """
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled " "expression construct."
+ )
+ elif not self.context.isinsert:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() " "expression construct."
+ )
+ elif self.context.executemany:
+ return self.context.compiled_parameters
+ else:
+ return self.context.compiled_parameters[0]
+
+ @property
+ def returned_defaults_rows(self):
+ """Return a list of rows each containing the values of default
+ columns that were fetched using
+ the :meth:`.ValuesBase.return_defaults` feature.
+
+ The return value is a list of :class:`.Row` objects.
+
+ .. versionadded:: 1.4
+
+ """
+ return self.context.returned_default_rows
+
+ @property
+ def returned_defaults(self):
+ """Return the values of default columns that were fetched using
+ the :meth:`.ValuesBase.return_defaults` feature.
+
+ The value is an instance of :class:`.Row`, or ``None``
+ if :meth:`.ValuesBase.return_defaults` was not used or if the
+ backend does not support RETURNING.
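+
+        An illustrative sketch, assuming ``user_table`` is a
+        :class:`_schema.Table` with server-generated default columns::
+
+            stmt = user_table.insert().return_defaults()
+            result = connection.execute(stmt, {"name": "some name"})
+            server_values = result.returned_defaults  # Row or None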
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`.ValuesBase.return_defaults`
+
+ """
+
+ if self.context.executemany:
+ raise exc.InvalidRequestError(
+ "This statement was an executemany call; if return defaults "
+ "is supported, please use .returned_defaults_rows."
+ )
+
+ rows = self.context.returned_default_rows
+ if rows:
+ return rows[0]
+ else:
+ return None
+
+ def lastrow_has_defaults(self):
+ """Return ``lastrow_has_defaults()`` from the underlying
+ :class:`.ExecutionContext`.
+
+ See :class:`.ExecutionContext` for details.
+
+ """
+
+ return self.context.lastrow_has_defaults()
+
+ def postfetch_cols(self):
+ """Return ``postfetch_cols()`` from the underlying
+ :class:`.ExecutionContext`.
+
+ See :class:`.ExecutionContext` for details.
+
+ Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() or update() construct.
+
+ """
+
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled " "expression construct."
+ )
+ elif not self.context.isinsert and not self.context.isupdate:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() or update() "
+ "expression construct."
+ )
+ return self.context.postfetch_cols
+
+ def prefetch_cols(self):
+ """Return ``prefetch_cols()`` from the underlying
+ :class:`.ExecutionContext`.
+
+ See :class:`.ExecutionContext` for details.
+
+ Raises :class:`~sqlalchemy.exc.InvalidRequestError` if the executed
+ statement is not a compiled expression construct
+ or is not an insert() or update() construct.
+
+ """
+
+ if not self.context.compiled:
+ raise exc.InvalidRequestError(
+ "Statement is not a compiled " "expression construct."
+ )
+ elif not self.context.isinsert and not self.context.isupdate:
+ raise exc.InvalidRequestError(
+ "Statement is not an insert() or update() "
+ "expression construct."
+ )
+ return self.context.prefetch_cols
+
+ def supports_sane_rowcount(self):
+ """Return ``supports_sane_rowcount`` from the dialect.
+
+ See :attr:`_engine.CursorResult.rowcount` for background.
+
+ """
+
+ return self.dialect.supports_sane_rowcount
+
+ def supports_sane_multi_rowcount(self):
+ """Return ``supports_sane_multi_rowcount`` from the dialect.
+
+ See :attr:`_engine.CursorResult.rowcount` for background.
+
+ """
+
+ return self.dialect.supports_sane_multi_rowcount
+
+ @util.memoized_property
+ def rowcount(self):
+ """Return the 'rowcount' for this result.
+
+ The 'rowcount' reports the number of rows *matched*
+ by the WHERE criterion of an UPDATE or DELETE statement.
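+
+        E.g., as an illustrative sketch (``user_table`` assumed to be a
+        :class:`_schema.Table`)::
+
+            result = connection.execute(
+                user_table.update()
+                .where(user_table.c.name == "old name")
+                .values(name="new name")
+            )
+            matched = result.rowcount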
+
+ .. note::
+
+ Notes regarding :attr:`_engine.CursorResult.rowcount`:
+
+
+ * This attribute returns the number of rows *matched*,
+ which is not necessarily the same as the number of rows
+ that were actually *modified* - an UPDATE statement, for example,
+ may have no net change on a given row if the SET values
+ given are the same as those present in the row already.
+ Such a row would be matched but not modified.
+ On backends that feature both styles, such as MySQL,
+ rowcount is configured by default to return the match
+ count in all cases.
+
+ * :attr:`_engine.CursorResult.rowcount`
+ is *only* useful in conjunction
+ with an UPDATE or DELETE statement. Contrary to what the Python
+ DBAPI says, it does *not* return the
+ number of rows available from the results of a SELECT statement
+ as DBAPIs cannot support this functionality when rows are
+ unbuffered.
+
+ * :attr:`_engine.CursorResult.rowcount`
+ may not be fully implemented by
+ all dialects. In particular, most DBAPIs do not support an
+ aggregate rowcount result from an executemany call.
+ The :meth:`_engine.CursorResult.supports_sane_rowcount` and
+ :meth:`_engine.CursorResult.supports_sane_multi_rowcount` methods
+ will report from the dialect if each usage is known to be
+ supported.
+
+ * Statements that use RETURNING may not return a correct
+ rowcount.
+
+ .. seealso::
+
+ :ref:`tutorial_update_delete_rowcount` - in the :ref:`unified_tutorial`
+
+ """ # noqa: E501
+
+ try:
+ return self.context.rowcount
+ except BaseException as e:
+ self.cursor_strategy.handle_exception(self, self.cursor, e)
+
+ @property
+ def lastrowid(self):
+ """Return the 'lastrowid' accessor on the DBAPI cursor.
+
+ This is a DBAPI specific method and is only functional
+ for those backends which support it, for statements
+        where it is appropriate. Its behavior is not
+        consistent across backends.
+
+ Usage of this method is normally unnecessary when
+ using insert() expression constructs; the
+ :attr:`~CursorResult.inserted_primary_key` attribute provides a
+ tuple of primary key values for a newly inserted row,
+ regardless of database backend.
+
+ """
+ try:
+ return self.context.get_lastrowid()
+ except BaseException as e:
+ self.cursor_strategy.handle_exception(self, self.cursor, e)
+
+ @property
+ def returns_rows(self):
+ """True if this :class:`_engine.CursorResult` returns zero or more
+ rows.
+
+ I.e. if it is legal to call the methods
+ :meth:`_engine.CursorResult.fetchone`,
+        :meth:`_engine.CursorResult.fetchmany`, and
+        :meth:`_engine.CursorResult.fetchall`.
+
+ Overall, the value of :attr:`_engine.CursorResult.returns_rows` should
+ always be synonymous with whether or not the DBAPI cursor had a
+ ``.description`` attribute, indicating the presence of result columns,
+ noting that a cursor that returns zero rows still has a
+ ``.description`` if a row-returning statement was emitted.
+
+ This attribute should be True for all results that are against
+ SELECT statements, as well as for DML statements INSERT/UPDATE/DELETE
+ that use RETURNING. For INSERT/UPDATE/DELETE statements that were
+ not using RETURNING, the value will usually be False, however
+ there are some dialect-specific exceptions to this, such as when
+ using the MSSQL / pyodbc dialect a SELECT is emitted inline in
+ order to retrieve an inserted primary key value.
+
+
+ """
+ return self._metadata.returns_rows
+
+ @property
+ def is_insert(self):
+ """True if this :class:`_engine.CursorResult` is the result
+        of executing an expression language compiled
+ :func:`_expression.insert` construct.
+
+ When True, this implies that the
+ :attr:`inserted_primary_key` attribute is accessible,
+ assuming the statement did not include
+ a user defined "returning" construct.
+
+ """
+ return self.context.isinsert
+
+
+class CursorResult(BaseCursorResult, Result):
+ """A Result that is representing state from a DBAPI cursor.
+
+ .. versionchanged:: 1.4 The :class:`.CursorResult` and
+ :class:`.LegacyCursorResult`
+ classes replace the previous :class:`.ResultProxy` interface.
+ These classes are based on the :class:`.Result` calling API
+ which provides an updated usage model and calling facade for
+ SQLAlchemy Core and SQLAlchemy ORM.
+
+ Returns database rows via the :class:`.Row` class, which provides
+ additional API features and behaviors on top of the raw data returned by
+ the DBAPI. Through the use of filters such as the :meth:`.Result.scalars`
+ method, other kinds of objects may also be returned.
+
+ Within the scope of the 1.x series of SQLAlchemy, Core SQL results in
+ version 1.4 return an instance of :class:`._engine.LegacyCursorResult`
+    which takes the place of the ``ResultProxy`` interface used for the 1.3
+    series and previously. This object returns rows as :class:`.LegacyRow`
+    objects, which maintain Python mapping (i.e. dictionary) like behaviors
+    upon the row itself. Going forward, the :attr:`.Row._mapping` attribute
+    should be used for dictionary behaviors.
+
+ .. seealso::
+
+ :ref:`coretutorial_selecting` - introductory material for accessing
+ :class:`_engine.CursorResult` and :class:`.Row` objects.
+
+ """
+
+ _cursor_metadata = CursorResultMetaData
+ _cursor_strategy_cls = CursorFetchStrategy
+ _no_result_metadata = _NO_RESULT_METADATA
+ _is_cursor = True
+
+ def _fetchiter_impl(self):
+ fetchone = self.cursor_strategy.fetchone
+
+ while True:
+ row = fetchone(self, self.cursor)
+ if row is None:
+ break
+ yield row
+
+ def _fetchone_impl(self, hard_close=False):
+ return self.cursor_strategy.fetchone(self, self.cursor, hard_close)
+
+ def _fetchall_impl(self):
+ return self.cursor_strategy.fetchall(self, self.cursor)
+
+ def _fetchmany_impl(self, size=None):
+ return self.cursor_strategy.fetchmany(self, self.cursor, size)
+
+ def _raw_row_iterator(self):
+ return self._fetchiter_impl()
+
+ def merge(self, *others):
+ merged_result = super(CursorResult, self).merge(*others)
+ setup_rowcounts = not self._metadata.returns_rows
+ if setup_rowcounts:
+ merged_result.rowcount = sum(
+ result.rowcount for result in (self,) + others
+ )
+ return merged_result
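+
+    # illustrative: for two non-row-returning results r1 and r2,
+    # r1.merge(r2).rowcount equals r1.rowcount + r2.rowcount.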
+
+ def close(self):
+ """Close this :class:`_engine.CursorResult`.
+
+ This closes out the underlying DBAPI cursor corresponding to the
+ statement execution, if one is still present. Note that the DBAPI
+ cursor is automatically released when the :class:`_engine.CursorResult`
+ exhausts all available rows. :meth:`_engine.CursorResult.close` is
+ generally an optional method except in the case when discarding a
+ :class:`_engine.CursorResult` that still has additional rows pending
+ for fetch.
+
+ After this method is called, it is no longer valid to call upon
+ the fetch methods, which will raise a :class:`.ResourceClosedError`
+ on subsequent use.
+
+ .. seealso::
+
+ :ref:`connections_toplevel`
+
+ """
+ self._soft_close(hard=True)
+
+ @_generative
+ def yield_per(self, num):
+ self._yield_per = num
+ self.cursor_strategy.yield_per(self, self.cursor, num)
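+
+    # illustrative usage sketch, assuming ``stmt`` is a row-returning
+    # statement and ``conn`` a Connection:
+    #
+    #     result = conn.execution_options(stream_results=True).execute(stmt)
+    #     for row in result.yield_per(100):
+    #         ...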
+
+
+class LegacyCursorResult(CursorResult):
+ """Legacy version of :class:`.CursorResult`.
+
+    This class includes the "connection autoclose" behavior for use with
+    "connectionless" execution, and delivers rows using the
+    :class:`.LegacyRow` row implementation.
+
+ .. versionadded:: 1.4
+
+ """
+
+ _autoclose_connection = False
+ _process_row = LegacyRow
+ _cursor_metadata = LegacyCursorResultMetaData
+ _cursor_strategy_cls = CursorFetchStrategy
+
+ _no_result_metadata = _LEGACY_NO_RESULT_METADATA
+
+ def close(self):
+ """Close this :class:`_engine.LegacyCursorResult`.
+
+ This method has the same behavior as that of
+        :meth:`_engine.CursorResult.close`, but it also may close
+ the underlying :class:`.Connection` for the case of "connectionless"
+ execution.
+
+ .. deprecated:: 2.0 "connectionless" execution is deprecated and will
+ be removed in version 2.0. Version 2.0 will feature the
+ :class:`_future.Result`
+ object that will no longer affect the status
+ of the originating connection in any case.
+
+ After this method is called, it is no longer valid to call upon
+ the fetch methods, which will raise a :class:`.ResourceClosedError`
+ on subsequent use.
+
+ .. seealso::
+
+ :ref:`connections_toplevel`
+
+ :ref:`dbengine_implicit`
+ """
+ self._soft_close(hard=True)
+
+ def _soft_close(self, hard=False):
+ soft_closed = self._soft_closed
+ super(LegacyCursorResult, self)._soft_close(hard=hard)
+ if (
+ not soft_closed
+ and self._soft_closed
+ and self._autoclose_connection
+ ):
+ self.connection.close()
+
+
+ResultProxy = LegacyCursorResult
+
+
+class BufferedRowResultProxy(ResultProxy):
+ """A ResultProxy with row buffering behavior.
+
+ .. deprecated:: 1.4 this class is now supplied using a strategy object.
+ See :class:`.BufferedRowCursorFetchStrategy`.
+
+ """
+
+ _cursor_strategy_cls = BufferedRowCursorFetchStrategy
+
+
+class FullyBufferedResultProxy(ResultProxy):
+ """A result proxy that buffers rows fully upon creation.
+
+ .. deprecated:: 1.4 this class is now supplied using a strategy object.
+ See :class:`.FullyBufferedCursorFetchStrategy`.
+
+ """
+
+ _cursor_strategy_cls = FullyBufferedCursorFetchStrategy
+
+
+class BufferedColumnRow(LegacyRow):
+ """Row is now BufferedColumn in all cases"""
+
+
+class BufferedColumnResultProxy(ResultProxy):
+ """A ResultProxy with column buffering behavior.
+
+ .. versionchanged:: 1.4 This is now the default behavior of the Row
+ and this class does not change behavior in any way.
+
+ """
+
+ _process_row = BufferedColumnRow
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
new file mode 100644
index 0000000..268a2d6
--- /dev/null
+++ b/lib/sqlalchemy/engine/default.py
@@ -0,0 +1,1936 @@
+# engine/default.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Default implementations of per-dialect sqlalchemy.engine classes.
+
+These are semi-private implementation classes which are only of importance
+to database dialect authors; dialects will usually use the classes here
+as the base class for their own corresponding classes.
+
+"""
+
+import codecs
+import functools
+import random
+import re
+import weakref
+
+from . import characteristics
+from . import cursor as _cursor
+from . import interfaces
+from .base import Connection
+from .. import event
+from .. import exc
+from .. import pool
+from .. import processors
+from .. import types as sqltypes
+from .. import util
+from ..sql import compiler
+from ..sql import expression
+from ..sql.elements import quoted_name
+
+AUTOCOMMIT_REGEXP = re.compile(
+ r"\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)", re.I | re.UNICODE
+)
+
+# When we're handed literal SQL, ensure it's a SELECT query
+SERVER_SIDE_CURSOR_RE = re.compile(r"\s*SELECT", re.I | re.UNICODE)
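+
+# illustrative: AUTOCOMMIT_REGEXP.match("UPDATE t SET x = 1") matches, so
+# such a statement autocommits under the legacy autocommit feature, while
+# SERVER_SIDE_CURSOR_RE limits the deprecated server_side_cursors flag to
+# SELECT statements.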
+
+
+CACHE_HIT = util.symbol("CACHE_HIT")
+CACHE_MISS = util.symbol("CACHE_MISS")
+CACHING_DISABLED = util.symbol("CACHING_DISABLED")
+NO_CACHE_KEY = util.symbol("NO_CACHE_KEY")
+NO_DIALECT_SUPPORT = util.symbol("NO_DIALECT_SUPPORT")
+
+
+class DefaultDialect(interfaces.Dialect):
+ """Default implementation of Dialect"""
+
+ statement_compiler = compiler.SQLCompiler
+ ddl_compiler = compiler.DDLCompiler
+ type_compiler = compiler.GenericTypeCompiler
+ preparer = compiler.IdentifierPreparer
+ supports_alter = True
+ supports_comments = False
+ inline_comments = False
+ use_setinputsizes = False
+ supports_statement_cache = True
+
+ # the first value we'd get for an autoincrement
+ # column.
+ default_sequence_base = 1
+
+ # most DBAPIs happy with this for execute().
+ # not cx_oracle.
+ execute_sequence_format = tuple
+
+ supports_schemas = True
+ supports_views = True
+ supports_sequences = False
+ sequences_optional = False
+ preexecute_autoincrement_sequences = False
+ supports_identity_columns = False
+ postfetch_lastrowid = True
+ implicit_returning = False
+ full_returning = False
+ insert_executemany_returning = False
+
+ cte_follows_insert = False
+
+ supports_native_enum = False
+ supports_native_boolean = False
+ non_native_boolean_check_constraint = True
+
+ supports_simple_order_by_label = True
+
+ tuple_in_values = False
+
+ connection_characteristics = util.immutabledict(
+ {"isolation_level": characteristics.IsolationLevelCharacteristic()}
+ )
+
+ engine_config_types = util.immutabledict(
+ [
+ ("convert_unicode", util.bool_or_str("force")),
+ ("pool_timeout", util.asint),
+ ("echo", util.bool_or_str("debug")),
+ ("echo_pool", util.bool_or_str("debug")),
+ ("pool_recycle", util.asint),
+ ("pool_size", util.asint),
+ ("max_overflow", util.asint),
+ ("future", util.asbool),
+ ]
+ )
+
+ # if the NUMERIC type
+ # returns decimal.Decimal.
+ # *not* the FLOAT type however.
+ supports_native_decimal = False
+
+ if util.py3k:
+ supports_unicode_statements = True
+ supports_unicode_binds = True
+ returns_unicode_strings = sqltypes.String.RETURNS_UNICODE
+ description_encoding = None
+ else:
+ supports_unicode_statements = False
+ supports_unicode_binds = False
+ returns_unicode_strings = sqltypes.String.RETURNS_UNKNOWN
+ description_encoding = "use_encoding"
+
+ name = "default"
+
+ # length at which to truncate
+ # any identifier.
+ max_identifier_length = 9999
+ _user_defined_max_identifier_length = None
+
+ isolation_level = None
+
+ # sub-categories of max_identifier_length.
+    # currently these accommodate MySQL, which allows alias names
+    # of 255 characters but DDL names of only 64.
+ max_index_name_length = None
+ max_constraint_name_length = None
+
+ supports_sane_rowcount = True
+ supports_sane_multi_rowcount = True
+ colspecs = {}
+ default_paramstyle = "named"
+
+ supports_default_values = False
+ """dialect supports INSERT... DEFAULT VALUES syntax"""
+
+ supports_default_metavalue = False
+ """dialect supports INSERT... VALUES (DEFAULT) syntax"""
+
+ # not sure if this is a real thing but the compiler will deliver it
+ # if this is the only flag enabled.
+ supports_empty_insert = True
+ """dialect supports INSERT () VALUES ()"""
+
+ supports_multivalues_insert = False
+
+ supports_is_distinct_from = True
+
+ supports_server_side_cursors = False
+
+ server_side_cursors = False
+
+ # extra record-level locking features (#4860)
+ supports_for_update_of = False
+
+ server_version_info = None
+
+ default_schema_name = None
+
+ construct_arguments = None
+ """Optional set of argument specifiers for various SQLAlchemy
+ constructs, typically schema items.
+
+ To implement, establish as a series of tuples, as in::
+
+ construct_arguments = [
+ (schema.Index, {
+ "using": False,
+ "where": None,
+ "ops": None
+ })
+ ]
+
+ If the above construct is established on the PostgreSQL dialect,
+ the :class:`.Index` construct will now accept the keyword arguments
+    ``postgresql_using``, ``postgresql_where``, and ``postgresql_ops``.
+ Any other argument specified to the constructor of :class:`.Index`
+ which is prefixed with ``postgresql_`` will raise :class:`.ArgumentError`.
+
+ A dialect which does not include a ``construct_arguments`` member will
+ not participate in the argument validation system. For such a dialect,
+ any argument name is accepted by all participating constructs, within
+ the namespace of arguments prefixed with that dialect name. The rationale
+ here is so that third-party dialects that haven't yet implemented this
+ feature continue to function in the old way.
+
+ .. versionadded:: 0.9.2
+
+ .. seealso::
+
+ :class:`.DialectKWArgs` - implementing base class which consumes
+ :attr:`.DefaultDialect.construct_arguments`
+
+
+ """
+
+ # indicates symbol names are
+ # UPPERCASEd if they are case insensitive
+ # within the database.
+ # if this is True, the methods normalize_name()
+ # and denormalize_name() must be provided.
+ requires_name_normalize = False
+
+ reflection_options = ()
+
+ dbapi_exception_translation_map = util.immutabledict()
+ """mapping used in the extremely unusual case that a DBAPI's
+    published exceptions don't actually have the ``__name__`` that they
+    are linked to.
+
+ .. versionadded:: 1.0.5
+
+ """
+
+ is_async = False
+
+ CACHE_HIT = CACHE_HIT
+ CACHE_MISS = CACHE_MISS
+ CACHING_DISABLED = CACHING_DISABLED
+ NO_CACHE_KEY = NO_CACHE_KEY
+ NO_DIALECT_SUPPORT = NO_DIALECT_SUPPORT
+
+ @util.deprecated_params(
+ convert_unicode=(
+ "1.3",
+ "The :paramref:`_sa.create_engine.convert_unicode` parameter "
+ "and corresponding dialect-level parameters are deprecated, "
+ "and will be removed in a future release. Modern DBAPIs support "
+ "Python Unicode natively and this parameter is unnecessary.",
+ ),
+ empty_in_strategy=(
+ "1.4",
+ "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
+ "deprecated, and no longer has any effect. All IN expressions "
+ "are now rendered using "
+ 'the "expanding parameter" strategy which renders a set of bound'
+ 'expressions, or an "empty set" SELECT, at statement execution'
+ "time.",
+ ),
+ case_sensitive=(
+ "1.4",
+ "The :paramref:`_sa.create_engine.case_sensitive` parameter "
+ "is deprecated and will be removed in a future release. "
+ "Applications should work with result column names in a case "
+ "sensitive fashion.",
+ ),
+ server_side_cursors=(
+ "1.4",
+ "The :paramref:`_sa.create_engine.server_side_cursors` parameter "
+ "is deprecated and will be removed in a future release. Please "
+ "use the "
+ ":paramref:`_engine.Connection.execution_options.stream_results` "
+ "parameter.",
+ ),
+ )
+ def __init__(
+ self,
+ convert_unicode=False,
+ encoding="utf-8",
+ paramstyle=None,
+ dbapi=None,
+ implicit_returning=None,
+ case_sensitive=True,
+ supports_native_boolean=None,
+ max_identifier_length=None,
+ label_length=None,
+ # int() is because the @deprecated_params decorator cannot accommodate
+ # the direct reference to the "NO_LINTING" object
+ compiler_linting=int(compiler.NO_LINTING),
+ server_side_cursors=False,
+ **kwargs
+ ):
+
+ if not getattr(self, "ported_sqla_06", True):
+ util.warn(
+ "The %s dialect is not yet ported to the 0.6 format"
+ % self.name
+ )
+
+ if server_side_cursors:
+ if not self.supports_server_side_cursors:
+ raise exc.ArgumentError(
+ "Dialect %s does not support server side cursors" % self
+ )
+ else:
+ self.server_side_cursors = True
+
+ self.convert_unicode = convert_unicode
+ self.encoding = encoding
+ self.positional = False
+ self._ischema = None
+ self.dbapi = dbapi
+ if paramstyle is not None:
+ self.paramstyle = paramstyle
+ elif self.dbapi is not None:
+ self.paramstyle = self.dbapi.paramstyle
+ else:
+ self.paramstyle = self.default_paramstyle
+ if implicit_returning is not None:
+ self.implicit_returning = implicit_returning
+ self.positional = self.paramstyle in ("qmark", "format", "numeric")
+ self.identifier_preparer = self.preparer(self)
+ self.type_compiler = self.type_compiler(self)
+ if supports_native_boolean is not None:
+ self.supports_native_boolean = supports_native_boolean
+ self.case_sensitive = case_sensitive
+
+ self._user_defined_max_identifier_length = max_identifier_length
+ if self._user_defined_max_identifier_length:
+ self.max_identifier_length = (
+ self._user_defined_max_identifier_length
+ )
+ self.label_length = label_length
+ self.compiler_linting = compiler_linting
+ if self.description_encoding == "use_encoding":
+ self._description_decoder = (
+ processors.to_unicode_processor_factory
+ )(encoding)
+ elif self.description_encoding is not None:
+ self._description_decoder = (
+ processors.to_unicode_processor_factory
+ )(self.description_encoding)
+ self._encoder = codecs.getencoder(self.encoding)
+ self._decoder = processors.to_unicode_processor_factory(self.encoding)
+
+ def _ensure_has_table_connection(self, arg):
+
+ if not isinstance(arg, Connection):
+ raise exc.ArgumentError(
+ "The argument passed to Dialect.has_table() should be a "
+ "%s, got %s. "
+ "Additionally, the Dialect.has_table() method is for "
+ "internal dialect "
+ "use only; please use "
+ "``inspect(some_engine).has_table(<tablename>>)`` "
+ "for public API use." % (Connection, type(arg))
+ )
+
+ @util.memoized_property
+ def _supports_statement_cache(self):
+ ssc = self.__class__.__dict__.get("supports_statement_cache", None)
+ if ssc is None:
+ util.warn(
+ "Dialect %s:%s will not make use of SQL compilation caching "
+ "as it does not set the 'supports_statement_cache' attribute "
+ "to ``True``. This can have "
+ "significant performance implications including some "
+ "performance degradations in comparison to prior SQLAlchemy "
+ "versions. Dialect maintainers should seek to set this "
+ "attribute to True after appropriate development and testing "
+ "for SQLAlchemy 1.4 caching support. Alternatively, this "
+ "attribute may be set to False which will disable this "
+ "warning." % (self.name, self.driver),
+ code="cprf",
+ )
+
+ return bool(ssc)
+
+ @util.memoized_property
+ def _type_memos(self):
+ return weakref.WeakKeyDictionary()
+
+ @property
+ def dialect_description(self):
+ return self.name + "+" + self.driver
+
+ @property
+ def supports_sane_rowcount_returning(self):
+ """True if this dialect supports sane rowcount even if RETURNING is
+ in use.
+
+ For dialects that don't support RETURNING, this is synonymous with
+ ``supports_sane_rowcount``.
+
+ """
+ return self.supports_sane_rowcount
+
+ @classmethod
+ def get_pool_class(cls, url):
+ return getattr(cls, "poolclass", pool.QueuePool)
+
+ def get_dialect_pool_class(self, url):
+ return self.get_pool_class(url)
+
+ @classmethod
+ def load_provisioning(cls):
+ package = ".".join(cls.__module__.split(".")[0:-1])
+ try:
+ __import__(package + ".provision")
+ except ImportError:
+ pass
+
+ def initialize(self, connection):
+ try:
+ self.server_version_info = self._get_server_version_info(
+ connection
+ )
+ except NotImplementedError:
+ self.server_version_info = None
+ try:
+ self.default_schema_name = self._get_default_schema_name(
+ connection
+ )
+ except NotImplementedError:
+ self.default_schema_name = None
+
+ try:
+ self.default_isolation_level = self.get_default_isolation_level(
+ connection.connection
+ )
+ except NotImplementedError:
+ self.default_isolation_level = None
+
+ if self.returns_unicode_strings is sqltypes.String.RETURNS_UNKNOWN:
+ if util.py3k:
+ raise exc.InvalidRequestError(
+ "RETURNS_UNKNOWN is unsupported in Python 3"
+ )
+ self.returns_unicode_strings = self._check_unicode_returns(
+ connection
+ )
+
+ if (
+ self.description_encoding is not None
+ and self._check_unicode_description(connection)
+ ):
+ self._description_decoder = self.description_encoding = None
+
+ if not self._user_defined_max_identifier_length:
+ max_ident_length = self._check_max_identifier_length(connection)
+ if max_ident_length:
+ self.max_identifier_length = max_ident_length
+
+ if (
+ self.label_length
+ and self.label_length > self.max_identifier_length
+ ):
+ raise exc.ArgumentError(
+ "Label length of %d is greater than this dialect's"
+ " maximum identifier length of %d"
+ % (self.label_length, self.max_identifier_length)
+ )
+
+ def on_connect(self):
+ # inherits the docstring from interfaces.Dialect.on_connect
+ return None
+
+ def _check_max_identifier_length(self, connection):
+ """Perform a connection / server version specific check to determine
+ the max_identifier_length.
+
+ If the dialect's class level max_identifier_length should be used,
+ can return None.
+
+ .. versionadded:: 1.3.9
+
+ """
+ return None
+
+ def get_default_isolation_level(self, dbapi_conn):
+ """Given a DBAPI connection, return its isolation level, or
+ a default isolation level if one cannot be retrieved.
+
+ May be overridden by subclasses in order to provide a
+ "fallback" isolation level for databases that cannot reliably
+ retrieve the actual isolation level.
+
+        By default, calls the :meth:`_engine.Dialect.get_isolation_level`
+ method, propagating any exceptions raised.
+
+ .. versionadded:: 1.3.22
+
+ """
+ return self.get_isolation_level(dbapi_conn)
+
+ def _check_unicode_returns(self, connection, additional_tests=None):
+ # this now runs in py2k only and will be removed in 2.0; disabled for
+ # Python 3 in all cases under #5315
+ if util.py2k and not self.supports_unicode_statements:
+ cast_to = util.binary_type
+ else:
+ cast_to = util.text_type
+
+ if self.positional:
+ parameters = self.execute_sequence_format()
+ else:
+ parameters = {}
+
+ def check_unicode(test):
+ statement = cast_to(expression.select(test).compile(dialect=self))
+ try:
+ cursor = connection.connection.cursor()
+ connection._cursor_execute(cursor, statement, parameters)
+ row = cursor.fetchone()
+ cursor.close()
+ except exc.DBAPIError as de:
+ # note that _cursor_execute() will have closed the cursor
+ # if an exception is thrown.
+ util.warn(
+ "Exception attempting to "
+ "detect unicode returns: %r" % de
+ )
+ return False
+ else:
+ return isinstance(row[0], util.text_type)
+
+ tests = [
+ # detect plain VARCHAR
+ expression.cast(
+ expression.literal_column("'test plain returns'"),
+ sqltypes.VARCHAR(60),
+ ),
+ # detect if there's an NVARCHAR type with different behavior
+ # available
+ expression.cast(
+ expression.literal_column("'test unicode returns'"),
+ sqltypes.Unicode(60),
+ ),
+ ]
+
+ if additional_tests:
+ tests += additional_tests
+
+ results = {check_unicode(test) for test in tests}
+
+ if results.issuperset([True, False]):
+ return sqltypes.String.RETURNS_CONDITIONAL
+ else:
+ return (
+ sqltypes.String.RETURNS_UNICODE
+ if results == {True}
+ else sqltypes.String.RETURNS_BYTES
+ )
+
+ def _check_unicode_description(self, connection):
+ # all DBAPIs on Py2K return cursor.description as encoded
+
+ if util.py2k and not self.supports_unicode_statements:
+ cast_to = util.binary_type
+ else:
+ cast_to = util.text_type
+
+ cursor = connection.connection.cursor()
+ try:
+ cursor.execute(
+ cast_to(
+ expression.select(
+ expression.literal_column("'x'").label("some_label")
+ ).compile(dialect=self)
+ )
+ )
+ return isinstance(cursor.description[0][0], util.text_type)
+ finally:
+ cursor.close()
+
+ def type_descriptor(self, typeobj):
+ """Provide a database-specific :class:`.TypeEngine` object, given
+ the generic object which comes from the types module.
+
+ This method looks for a dictionary called
+ ``colspecs`` as a class or instance-level variable,
+ and passes on to :func:`_types.adapt_type`.
+
+ """
+ return sqltypes.adapt_type(typeobj, self.colspecs)
+
+ def has_index(self, connection, table_name, index_name, schema=None):
+ if not self.has_table(connection, table_name, schema=schema):
+ return False
+ for idx in self.get_indexes(connection, table_name, schema=schema):
+ if idx["name"] == index_name:
+ return True
+ else:
+ return False
+
+ def validate_identifier(self, ident):
+ if len(ident) > self.max_identifier_length:
+ raise exc.IdentifierError(
+ "Identifier '%s' exceeds maximum length of %d characters"
+ % (ident, self.max_identifier_length)
+ )
+
+ def connect(self, *cargs, **cparams):
+ # inherits the docstring from interfaces.Dialect.connect
+ return self.dbapi.connect(*cargs, **cparams)
+
+ def create_connect_args(self, url):
+ # inherits the docstring from interfaces.Dialect.create_connect_args
+ opts = url.translate_connect_args()
+ opts.update(url.query)
+ return [[], opts]
+
+ def set_engine_execution_options(self, engine, opts):
+ supported_names = set(self.connection_characteristics).intersection(
+ opts
+ )
+ if supported_names:
+ characteristics = util.immutabledict(
+ (name, opts[name]) for name in supported_names
+ )
+
+ @event.listens_for(engine, "engine_connect")
+ def set_connection_characteristics(connection, branch):
+ if not branch:
+ self._set_connection_characteristics(
+ connection, characteristics
+ )
+
+ def set_connection_execution_options(self, connection, opts):
+ supported_names = set(self.connection_characteristics).intersection(
+ opts
+ )
+ if supported_names:
+ characteristics = util.immutabledict(
+ (name, opts[name]) for name in supported_names
+ )
+ self._set_connection_characteristics(connection, characteristics)
+
+ def _set_connection_characteristics(self, connection, characteristics):
+
+ characteristic_values = [
+ (name, self.connection_characteristics[name], value)
+ for name, value in characteristics.items()
+ ]
+
+ if connection.in_transaction():
+ trans_objs = [
+ (name, obj)
+ for name, obj, value in characteristic_values
+ if obj.transactional
+ ]
+ if trans_objs:
+ if connection._is_future:
+ raise exc.InvalidRequestError(
+ "This connection has already initialized a SQLAlchemy "
+ "Transaction() object via begin() or autobegin; "
+ "%s may not be altered unless rollback() or commit() "
+ "is called first."
+ % (", ".join(name for name, obj in trans_objs))
+ )
+ else:
+ util.warn(
+ "Connection is already established with a "
+ "Transaction; "
+ "setting %s may implicitly rollback or "
+ "commit "
+ "the existing transaction, or have no effect until "
+ "next transaction"
+ % (", ".join(name for name, obj in trans_objs))
+ )
+
+ dbapi_connection = connection.connection.dbapi_connection
+ for name, characteristic, value in characteristic_values:
+ characteristic.set_characteristic(self, dbapi_connection, value)
+ connection.connection._connection_record.finalize_callback.append(
+ functools.partial(self._reset_characteristics, characteristics)
+ )
+
+ def _reset_characteristics(self, characteristics, dbapi_connection):
+ for characteristic_name in characteristics:
+ characteristic = self.connection_characteristics[
+ characteristic_name
+ ]
+ characteristic.reset_characteristic(self, dbapi_connection)
+
+ def do_begin(self, dbapi_connection):
+ pass
+
+ def do_rollback(self, dbapi_connection):
+ dbapi_connection.rollback()
+
+ def do_commit(self, dbapi_connection):
+ dbapi_connection.commit()
+
+ def do_close(self, dbapi_connection):
+ dbapi_connection.close()
+
+ @util.memoized_property
+ def _dialect_specific_select_one(self):
+ return str(expression.select(1).compile(dialect=self))
+
+ def do_ping(self, dbapi_connection):
+ cursor = None
+ try:
+ cursor = dbapi_connection.cursor()
+ try:
+ cursor.execute(self._dialect_specific_select_one)
+ finally:
+ cursor.close()
+ except self.dbapi.Error as err:
+ if self.is_disconnect(err, dbapi_connection, cursor):
+ return False
+ else:
+ raise
+ else:
+ return True
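+
+    # illustrative: do_ping() is the hook behind the pool pre-ping feature;
+    # e.g. create_engine(url, pool_pre_ping=True) invokes it when a
+    # connection is checked out from the pool.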
+
+ def create_xid(self):
+ """Create a random two-phase transaction ID.
+
+ This id will be passed to do_begin_twophase(), do_rollback_twophase(),
+ do_commit_twophase(). Its format is unspecified.
+ """
+
+ return "_sa_%032x" % random.randint(0, 2 ** 128)
+
+ def do_savepoint(self, connection, name):
+ connection.execute(expression.SavepointClause(name))
+
+ def do_rollback_to_savepoint(self, connection, name):
+ connection.execute(expression.RollbackToSavepointClause(name))
+
+ def do_release_savepoint(self, connection, name):
+ connection.execute(expression.ReleaseSavepointClause(name))
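+
+    # illustrative: the three savepoint hooks above back
+    # Connection.begin_nested(), which emits SAVEPOINT / ROLLBACK TO
+    # SAVEPOINT / RELEASE SAVEPOINT as the nested transaction proceeds.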
+
+ def do_executemany(self, cursor, statement, parameters, context=None):
+ cursor.executemany(statement, parameters)
+
+ def do_execute(self, cursor, statement, parameters, context=None):
+ cursor.execute(statement, parameters)
+
+ def do_execute_no_params(self, cursor, statement, context=None):
+ cursor.execute(statement)
+
+ def is_disconnect(self, e, connection, cursor):
+ return False
+
+ def reset_isolation_level(self, dbapi_conn):
+ # default_isolation_level is read from the first connection
+ # after the initial set of 'isolation_level', if any, so is
+ # the configured default of this dialect.
+ self.set_isolation_level(dbapi_conn, self.default_isolation_level)
+
+ def normalize_name(self, name):
+ if name is None:
+ return None
+ if util.py2k:
+ if isinstance(name, str):
+ name = name.decode(self.encoding)
+
+ name_lower = name.lower()
+ name_upper = name.upper()
+
+ if name_upper == name_lower:
+ # name has no upper/lower conversion, e.g. non-european characters.
+ # return unchanged
+ return name
+ elif name_upper == name and not (
+ self.identifier_preparer._requires_quotes
+ )(name_lower):
+ # name is all uppercase and doesn't require quoting; normalize
+ # to all lower case
+ return name_lower
+ elif name_lower == name:
+ # name is all lower case, which if denormalized means we need to
+ # force quoting on it
+ return quoted_name(name, quote=True)
+ else:
+            # name is mixed case, meaning it will be quoted in SQL when used
+            # later; no normalization is performed
+ return name
+
+ def denormalize_name(self, name):
+ if name is None:
+ return None
+
+ name_lower = name.lower()
+ name_upper = name.upper()
+
+ if name_upper == name_lower:
+ # name has no upper/lower conversion, e.g. non-european characters.
+ # return unchanged
+ return name
+ elif name_lower == name and not (
+ self.identifier_preparer._requires_quotes
+ )(name_lower):
+ name = name_upper
+ if util.py2k:
+ if not self.supports_unicode_binds:
+ name = name.encode(self.encoding)
+ else:
+ name = unicode(name) # noqa
+ return name
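+
+    # illustrative round trip for a backend that stores case-insensitive
+    # names as uppercase (e.g. Oracle), assuming no quoting is required:
+    #     normalize_name("MY_TABLE")   -> "my_table"
+    #     denormalize_name("my_table") -> "MY_TABLE"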
+
+ def get_driver_connection(self, connection):
+ return connection
+
+
+class _RendersLiteral(object):
+ def literal_processor(self, dialect):
+ def process(value):
+ return "'%s'" % value
+
+ return process
+
+
+class _StrDateTime(_RendersLiteral, sqltypes.DateTime):
+ pass
+
+
+class _StrDate(_RendersLiteral, sqltypes.Date):
+ pass
+
+
+class _StrTime(_RendersLiteral, sqltypes.Time):
+ pass
+
+
+class StrCompileDialect(DefaultDialect):
+
+ statement_compiler = compiler.StrSQLCompiler
+ ddl_compiler = compiler.DDLCompiler
+ type_compiler = compiler.StrSQLTypeCompiler
+ preparer = compiler.IdentifierPreparer
+
+ supports_statement_cache = True
+
+ supports_identity_columns = True
+
+ supports_sequences = True
+ sequences_optional = True
+ preexecute_autoincrement_sequences = False
+ implicit_returning = False
+
+ supports_native_boolean = True
+
+ supports_multivalues_insert = True
+ supports_simple_order_by_label = True
+
+ colspecs = {
+ sqltypes.DateTime: _StrDateTime,
+ sqltypes.Date: _StrDate,
+ sqltypes.Time: _StrTime,
+ }
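+
+    # illustrative: this dialect backs backend-agnostic string compilation,
+    # e.g. stmt.compile(dialect=StrCompileDialect(),
+    #                   compile_kwargs={"literal_binds": True}) renders
+    # literal SQL, using the _Str* types above for dates and times.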
+
+
+class DefaultExecutionContext(interfaces.ExecutionContext):
+ isinsert = False
+ isupdate = False
+ isdelete = False
+ is_crud = False
+ is_text = False
+ isddl = False
+ executemany = False
+ compiled = None
+ statement = None
+ result_column_struct = None
+ returned_default_rows = None
+ execution_options = util.immutabledict()
+
+ include_set_input_sizes = None
+ exclude_set_input_sizes = None
+
+ cursor_fetch_strategy = _cursor._DEFAULT_FETCH
+
+ cache_stats = None
+ invoked_statement = None
+
+ _is_implicit_returning = False
+ _is_explicit_returning = False
+ _is_future_result = False
+ _is_server_side = False
+
+ _soft_closed = False
+
+ # a hook for SQLite's translation of
+ # result column names
+ # NOTE: pyhive is using this hook, can't remove it :(
+ _translate_colname = None
+
+ _expanded_parameters = util.immutabledict()
+
+ cache_hit = NO_CACHE_KEY
+
+ @classmethod
+ def _init_ddl(
+ cls,
+ dialect,
+ connection,
+ dbapi_connection,
+ execution_options,
+ compiled_ddl,
+ ):
+ """Initialize execution context for a DDLElement construct."""
+
+ self = cls.__new__(cls)
+ self.root_connection = connection
+ self._dbapi_connection = dbapi_connection
+ self.dialect = connection.dialect
+
+ self.compiled = compiled = compiled_ddl
+ self.isddl = True
+
+ self.execution_options = execution_options
+
+ self._is_future_result = (
+ connection._is_future
+ or self.execution_options.get("future_result", False)
+ )
+
+ self.unicode_statement = util.text_type(compiled)
+ if compiled.schema_translate_map:
+ schema_translate_map = self.execution_options.get(
+ "schema_translate_map", {}
+ )
+
+ rst = compiled.preparer._render_schema_translates
+ self.unicode_statement = rst(
+ self.unicode_statement, schema_translate_map
+ )
+
+ if not dialect.supports_unicode_statements:
+ self.statement = dialect._encoder(self.unicode_statement)[0]
+ else:
+ self.statement = self.unicode_statement
+
+ self.cursor = self.create_cursor()
+ self.compiled_parameters = []
+
+ if dialect.positional:
+ self.parameters = [dialect.execute_sequence_format()]
+ else:
+ self.parameters = [{}]
+
+ return self
+
+ @classmethod
+ def _init_compiled(
+ cls,
+ dialect,
+ connection,
+ dbapi_connection,
+ execution_options,
+ compiled,
+ parameters,
+ invoked_statement,
+ extracted_parameters,
+ cache_hit=CACHING_DISABLED,
+ ):
+ """Initialize execution context for a Compiled construct."""
+
+ self = cls.__new__(cls)
+ self.root_connection = connection
+ self._dbapi_connection = dbapi_connection
+ self.dialect = connection.dialect
+ self.extracted_parameters = extracted_parameters
+ self.invoked_statement = invoked_statement
+ self.compiled = compiled
+ self.cache_hit = cache_hit
+
+ self.execution_options = execution_options
+
+ self._is_future_result = (
+ connection._is_future
+ or self.execution_options.get("future_result", False)
+ )
+
+ self.result_column_struct = (
+ compiled._result_columns,
+ compiled._ordered_columns,
+ compiled._textual_ordered_columns,
+ compiled._loose_column_name_matching,
+ )
+ self.isinsert = compiled.isinsert
+ self.isupdate = compiled.isupdate
+ self.isdelete = compiled.isdelete
+ self.is_text = compiled.isplaintext
+
+ if self.isinsert or self.isupdate or self.isdelete:
+ self.is_crud = True
+ self._is_explicit_returning = bool(compiled.statement._returning)
+ self._is_implicit_returning = bool(
+ compiled.returning and not compiled.statement._returning
+ )
+
+ if not parameters:
+ self.compiled_parameters = [
+ compiled.construct_params(
+ extracted_parameters=extracted_parameters,
+ escape_names=False,
+ )
+ ]
+ else:
+ self.compiled_parameters = [
+ compiled.construct_params(
+ m,
+ escape_names=False,
+ _group_number=grp,
+ extracted_parameters=extracted_parameters,
+ )
+ for grp, m in enumerate(parameters)
+ ]
+
+ self.executemany = len(parameters) > 1
+
+ # this must occur before create_cursor() since the statement
+ # has to be regexed in some cases for server side cursor
+ if util.py2k:
+ self.unicode_statement = util.text_type(compiled.string)
+ else:
+ self.unicode_statement = compiled.string
+
+ self.cursor = self.create_cursor()
+
+ if self.compiled.insert_prefetch or self.compiled.update_prefetch:
+ if self.executemany:
+ self._process_executemany_defaults()
+ else:
+ self._process_executesingle_defaults()
+
+ processors = compiled._bind_processors
+
+ if compiled.literal_execute_params or compiled.post_compile_params:
+ if self.executemany:
+ raise exc.InvalidRequestError(
+ "'literal_execute' or 'expanding' parameters can't be "
+ "used with executemany()"
+ )
+
+ expanded_state = compiled._process_parameters_for_postcompile(
+ self.compiled_parameters[0]
+ )
+
+ # re-assign self.unicode_statement
+ self.unicode_statement = expanded_state.statement
+
+ # used by set_input_sizes() which is needed for Oracle
+ self._expanded_parameters = expanded_state.parameter_expansion
+
+ processors = dict(processors)
+ processors.update(expanded_state.processors)
+ positiontup = expanded_state.positiontup
+ elif compiled.positional:
+ positiontup = self.compiled.positiontup
+
+ if compiled.schema_translate_map:
+ schema_translate_map = self.execution_options.get(
+ "schema_translate_map", {}
+ )
+ rst = compiled.preparer._render_schema_translates
+ self.unicode_statement = rst(
+ self.unicode_statement, schema_translate_map
+ )
+
+ # final self.unicode_statement is now assigned, encode if needed
+ # by dialect
+ if not dialect.supports_unicode_statements:
+ self.statement = self.unicode_statement.encode(
+ self.dialect.encoding
+ )
+ else:
+ self.statement = self.unicode_statement
+
+ # Convert the dictionary of bind parameter values
+ # into a dict or list to be sent to the DBAPI's
+ # execute() or executemany() method.
+ parameters = []
+ if compiled.positional:
+ for compiled_params in self.compiled_parameters:
+ param = [
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ for key in positiontup
+ ]
+ parameters.append(dialect.execute_sequence_format(param))
+ else:
+ encode = not dialect.supports_unicode_statements
+ if encode:
+ encoder = dialect._encoder
+ for compiled_params in self.compiled_parameters:
+ escaped_bind_names = compiled.escaped_bind_names
+
+ if encode:
+ if escaped_bind_names:
+ param = {
+ encoder(escaped_bind_names.get(key, key))[
+ 0
+ ]: processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ for key in compiled_params
+ }
+ else:
+ param = {
+ encoder(key)[0]: processors[key](
+ compiled_params[key]
+ )
+ if key in processors
+ else compiled_params[key]
+ for key in compiled_params
+ }
+ else:
+ if escaped_bind_names:
+ param = {
+ escaped_bind_names.get(key, key): processors[key](
+ compiled_params[key]
+ )
+ if key in processors
+ else compiled_params[key]
+ for key in compiled_params
+ }
+ else:
+ param = {
+ key: processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ for key in compiled_params
+ }
+
+ parameters.append(param)
+
+ self.parameters = dialect.execute_sequence_format(parameters)
+
+ return self
+
+ @classmethod
+ def _init_statement(
+ cls,
+ dialect,
+ connection,
+ dbapi_connection,
+ execution_options,
+ statement,
+ parameters,
+ ):
+ """Initialize execution context for a string SQL statement."""
+
+ self = cls.__new__(cls)
+ self.root_connection = connection
+ self._dbapi_connection = dbapi_connection
+ self.dialect = connection.dialect
+ self.is_text = True
+
+ self.execution_options = execution_options
+
+ self._is_future_result = (
+ connection._is_future
+ or self.execution_options.get("future_result", False)
+ )
+
+ if not parameters:
+ if self.dialect.positional:
+ self.parameters = [dialect.execute_sequence_format()]
+ else:
+ self.parameters = [{}]
+ elif isinstance(parameters[0], dialect.execute_sequence_format):
+ self.parameters = parameters
+ elif isinstance(parameters[0], dict):
+ if dialect.supports_unicode_statements:
+ self.parameters = parameters
+ else:
+ self.parameters = [
+ {dialect._encoder(k)[0]: d[k] for k in d}
+ for d in parameters
+ ] or [{}]
+ else:
+ self.parameters = [
+ dialect.execute_sequence_format(p) for p in parameters
+ ]
+
+ self.executemany = len(parameters) > 1
+
+ if not dialect.supports_unicode_statements and isinstance(
+ statement, util.text_type
+ ):
+ self.unicode_statement = statement
+ self.statement = dialect._encoder(statement)[0]
+ else:
+ self.statement = self.unicode_statement = statement
+
+ self.cursor = self.create_cursor()
+ return self
+
+ @classmethod
+ def _init_default(
+ cls, dialect, connection, dbapi_connection, execution_options
+ ):
+ """Initialize execution context for a ColumnDefault construct."""
+
+ self = cls.__new__(cls)
+ self.root_connection = connection
+ self._dbapi_connection = dbapi_connection
+ self.dialect = connection.dialect
+
+ self.execution_options = execution_options
+
+ self._is_future_result = (
+ connection._is_future
+ or self.execution_options.get("future_result", False)
+ )
+
+ self.cursor = self.create_cursor()
+ return self
+
+ def _get_cache_stats(self):
+ if self.compiled is None:
+ return "raw sql"
+
+ now = util.perf_counter()
+
+ ch = self.cache_hit
+
+ if ch is NO_CACHE_KEY:
+ return "no key %.5fs" % (now - self.compiled._gen_time,)
+ elif ch is CACHE_HIT:
+ return "cached since %.4gs ago" % (now - self.compiled._gen_time,)
+ elif ch is CACHE_MISS:
+ return "generated in %.5fs" % (now - self.compiled._gen_time,)
+ elif ch is CACHING_DISABLED:
+ return "caching disabled %.5fs" % (now - self.compiled._gen_time,)
+ elif ch is NO_DIALECT_SUPPORT:
+ return "dialect %s+%s does not support caching %.5fs" % (
+ self.dialect.name,
+ self.dialect.driver,
+ now - self.compiled._gen_time,
+ )
+ else:
+ return "unknown"
+
+ @util.memoized_property
+ def identifier_preparer(self):
+ if self.compiled:
+ return self.compiled.preparer
+ elif "schema_translate_map" in self.execution_options:
+ return self.dialect.identifier_preparer._with_schema_translate(
+ self.execution_options["schema_translate_map"]
+ )
+ else:
+ return self.dialect.identifier_preparer
+
+ @util.memoized_property
+ def engine(self):
+ return self.root_connection.engine
+
+ @util.memoized_property
+ def postfetch_cols(self):
+ return self.compiled.postfetch
+
+ @util.memoized_property
+ def prefetch_cols(self):
+ if self.isinsert:
+ return self.compiled.insert_prefetch
+ elif self.isupdate:
+ return self.compiled.update_prefetch
+ else:
+ return ()
+
+ @util.memoized_property
+ def returning_cols(self):
+        return self.compiled.returning
+
+ @util.memoized_property
+ def no_parameters(self):
+ return self.execution_options.get("no_parameters", False)
+
+ @util.memoized_property
+ def should_autocommit(self):
+ autocommit = self.execution_options.get(
+ "autocommit",
+ not self.compiled
+ and self.statement
+ and expression.PARSE_AUTOCOMMIT
+ or False,
+ )
+
+ if autocommit is expression.PARSE_AUTOCOMMIT:
+ return self.should_autocommit_text(self.unicode_statement)
+ else:
+ return autocommit
+
+ def _execute_scalar(self, stmt, type_, parameters=None):
+ """Execute a string statement on the current cursor, returning a
+ scalar result.
+
+ Used to fire off sequences, default phrases, and "select lastrowid"
+ types of statements individually or in the context of a parent INSERT
+ or UPDATE statement.
+
+ """
+
+ conn = self.root_connection
+ if (
+ isinstance(stmt, util.text_type)
+ and not self.dialect.supports_unicode_statements
+ ):
+ stmt = self.dialect._encoder(stmt)[0]
+
+ if "schema_translate_map" in self.execution_options:
+ schema_translate_map = self.execution_options.get(
+ "schema_translate_map", {}
+ )
+
+ rst = self.identifier_preparer._render_schema_translates
+ stmt = rst(stmt, schema_translate_map)
+
+ if not parameters:
+ if self.dialect.positional:
+ parameters = self.dialect.execute_sequence_format()
+ else:
+ parameters = {}
+
+ conn._cursor_execute(self.cursor, stmt, parameters, context=self)
+ r = self.cursor.fetchone()[0]
+ if type_ is not None:
+ # apply type post processors to the result
+ proc = type_._cached_result_processor(
+ self.dialect, self.cursor.description[0][1]
+ )
+ if proc:
+ return proc(r)
+ return r
+
+ @property
+ def connection(self):
+ conn = self.root_connection
+ if conn._is_future:
+ return conn
+ else:
+ return conn._branch()
+
+ def should_autocommit_text(self, statement):
+ return AUTOCOMMIT_REGEXP.match(statement)
+
+ def _use_server_side_cursor(self):
+ if not self.dialect.supports_server_side_cursors:
+ return False
+
+ if self.dialect.server_side_cursors:
+ # this is deprecated
+ use_server_side = self.execution_options.get(
+ "stream_results", True
+ ) and (
+ (
+ self.compiled
+ and isinstance(
+ self.compiled.statement, expression.Selectable
+ )
+ or (
+ (
+ not self.compiled
+ or isinstance(
+ self.compiled.statement, expression.TextClause
+ )
+ )
+ and self.unicode_statement
+ and SERVER_SIDE_CURSOR_RE.match(self.unicode_statement)
+ )
+ )
+ )
+ else:
+ use_server_side = self.execution_options.get(
+ "stream_results", False
+ )
+
+ return use_server_side
+
+ def create_cursor(self):
+ if (
+ # inlining initial preference checks for SS cursors
+ self.dialect.supports_server_side_cursors
+ and (
+ self.execution_options.get("stream_results", False)
+ or (
+ self.dialect.server_side_cursors
+ and self._use_server_side_cursor()
+ )
+ )
+ ):
+ self._is_server_side = True
+ return self.create_server_side_cursor()
+ else:
+ self._is_server_side = False
+ return self.create_default_cursor()
+
+ def create_default_cursor(self):
+ return self._dbapi_connection.cursor()
+
+ def create_server_side_cursor(self):
+ raise NotImplementedError()
+
+ def pre_exec(self):
+ pass
+
+ def get_out_parameter_values(self, names):
+ raise NotImplementedError(
+ "This dialect does not support OUT parameters"
+ )
+
+ def post_exec(self):
+ pass
+
+ def get_result_processor(self, type_, colname, coltype):
+ """Return a 'result processor' for a given type as present in
+ cursor.description.
+
+ This has a default implementation that dialects can override
+ for context-sensitive result type handling.
+
+ """
+ return type_._cached_result_processor(self.dialect, coltype)
+
+ def get_lastrowid(self):
+ """return self.cursor.lastrowid, or equivalent, after an INSERT.
+
+ This may involve calling special cursor functions, issuing a new SELECT
+ on the cursor (or a new one), or returning a stored value that was
+ calculated within post_exec().
+
+ This function will only be called for dialects which support "implicit"
+ primary key generation, keep preexecute_autoincrement_sequences set to
+ False, and when no explicit id value was bound to the statement.
+
+ The function is called once for an INSERT statement that would need to
+ return the last inserted primary key for those dialects that make use
+ of the lastrowid concept. In these cases, it is called directly after
+ :meth:`.ExecutionContext.post_exec`.
+
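+ As an illustrative sketch only, not the approach of any particular
+ dialect: a backend exposing a hypothetical ``LAST_ROW_ID()`` SQL
+ function might override this hook as::
+
+ class MyExecutionContext(DefaultExecutionContext):
+ def get_lastrowid(self):
+ # hypothetical backend function, for illustration only
+ self.cursor.execute("SELECT LAST_ROW_ID()")
+ return self.cursor.fetchone()[0]
+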
+ """
+ return self.cursor.lastrowid
+
+ def handle_dbapi_exception(self, e):
+ pass
+
+ @property
+ def rowcount(self):
+ return self.cursor.rowcount
+
+ def supports_sane_rowcount(self):
+ return self.dialect.supports_sane_rowcount
+
+ def supports_sane_multi_rowcount(self):
+ return self.dialect.supports_sane_multi_rowcount
+
+ def _setup_result_proxy(self):
+ exec_opt = self.execution_options
+
+ if self.is_crud or self.is_text:
+ result = self._setup_dml_or_text_result()
+ yp = sr = False
+ else:
+ yp = exec_opt.get("yield_per", None)
+ sr = self._is_server_side or exec_opt.get("stream_results", False)
+ strategy = self.cursor_fetch_strategy
+ if sr and strategy is _cursor._DEFAULT_FETCH:
+ strategy = _cursor.BufferedRowCursorFetchStrategy(
+ self.cursor, self.execution_options
+ )
+ cursor_description = (
+ strategy.alternate_cursor_description
+ or self.cursor.description
+ )
+ if cursor_description is None:
+ strategy = _cursor._NO_CURSOR_DQL
+
+ if self._is_future_result:
+ if self.root_connection.should_close_with_result:
+ raise exc.InvalidRequestError(
+ "can't use future_result=True with close_with_result"
+ )
+ result = _cursor.CursorResult(
+ self, strategy, cursor_description
+ )
+ else:
+ result = _cursor.LegacyCursorResult(
+ self, strategy, cursor_description
+ )
+
+ if (
+ self.compiled
+ and not self.isddl
+ and self.compiled.has_out_parameters
+ ):
+ self._setup_out_parameters(result)
+
+ self._soft_closed = result._soft_closed
+
+ if yp:
+ result = result.yield_per(yp)
+
+ return result
+
+ def _setup_out_parameters(self, result):
+
+ out_bindparams = [
+ (param, name)
+ for param, name in self.compiled.bind_names.items()
+ if param.isoutparam
+ ]
+ out_parameters = {}
+
+ for bindparam, raw_value in zip(
+ [param for param, name in out_bindparams],
+ self.get_out_parameter_values(
+ [name for param, name in out_bindparams]
+ ),
+ ):
+
+ type_ = bindparam.type
+ impl_type = type_.dialect_impl(self.dialect)
+ dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi)
+ result_processor = impl_type.result_processor(
+ self.dialect, dbapi_type
+ )
+ if result_processor is not None:
+ raw_value = result_processor(raw_value)
+ out_parameters[bindparam.key] = raw_value
+
+ result.out_parameters = out_parameters
+
+ def _setup_dml_or_text_result(self):
+ if self.isinsert:
+ if self.compiled.postfetch_lastrowid:
+ self.inserted_primary_key_rows = (
+ self._setup_ins_pk_from_lastrowid()
+ )
+ # else if not self._is_implicit_returning,
+ # the default inserted_primary_key_rows accessor will
+ # return an "empty" primary key collection when accessed.
+
+ strategy = self.cursor_fetch_strategy
+ if self._is_server_side and strategy is _cursor._DEFAULT_FETCH:
+ strategy = _cursor.BufferedRowCursorFetchStrategy(
+ self.cursor, self.execution_options
+ )
+ cursor_description = (
+ strategy.alternate_cursor_description or self.cursor.description
+ )
+ if cursor_description is None:
+ strategy = _cursor._NO_CURSOR_DML
+
+ if self._is_future_result:
+ result = _cursor.CursorResult(self, strategy, cursor_description)
+ else:
+ result = _cursor.LegacyCursorResult(
+ self, strategy, cursor_description
+ )
+
+ if self.isinsert:
+ if self._is_implicit_returning:
+ rows = result.all()
+
+ self.returned_default_rows = rows
+
+ self.inserted_primary_key_rows = (
+ self._setup_ins_pk_from_implicit_returning(result, rows)
+ )
+
+ # test that it has a cursor metadata that is accurate. the
+ # first row will have been fetched and current assumptions
+ # are that the result has only one row, until executemany()
+ # support is added here.
+ assert result._metadata.returns_rows
+ result._soft_close()
+ elif not self._is_explicit_returning:
+ result._soft_close()
+
+ # we assume here the result does not return any rows.
+ # *usually*, this will be true. However, some dialects
+ # such as that of MSSQL/pyodbc need to SELECT a post fetch
+ # function so this is not necessarily true.
+ # assert not result.returns_rows
+
+ elif self.isupdate and self._is_implicit_returning:
+ row = result.fetchone()
+ self.returned_default_rows = [row]
+ result._soft_close()
+
+ # test that it has a cursor metadata that is accurate.
+ # the rows have all been fetched however.
+ assert result._metadata.returns_rows
+
+ elif not result._metadata.returns_rows:
+ # no results, get rowcount
+ # (which requires open cursor on some drivers
+ # such as kintersbasdb, mxodbc)
+ result.rowcount
+ result._soft_close()
+ return result
+
+ @util.memoized_property
+ def inserted_primary_key_rows(self):
+ # if no specific "get primary key" strategy was set up
+ # during execution, return a "default" primary key based
+ # on what's in the compiled_parameters and nothing else.
+ return self._setup_ins_pk_from_empty()
+
+ def _setup_ins_pk_from_lastrowid(self):
+ getter = self.compiled._inserted_primary_key_from_lastrowid_getter
+
+ lastrowid = self.get_lastrowid()
+ return [getter(lastrowid, self.compiled_parameters[0])]
+
+ def _setup_ins_pk_from_empty(self):
+ getter = self.compiled._inserted_primary_key_from_lastrowid_getter
+ return [getter(None, param) for param in self.compiled_parameters]
+
+ def _setup_ins_pk_from_implicit_returning(self, result, rows):
+
+ if not rows:
+ return []
+
+ getter = self.compiled._inserted_primary_key_from_returning_getter
+ compiled_params = self.compiled_parameters
+
+ return [
+ getter(row, param) for row, param in zip(rows, compiled_params)
+ ]
+
+ def lastrow_has_defaults(self):
+ return (self.isinsert or self.isupdate) and bool(
+ self.compiled.postfetch
+ )
+
+ def _set_input_sizes(self):
+ """Given a cursor and ClauseParameters, call the appropriate
+ style of ``setinputsizes()`` on the cursor, using DB-API types
+ from the bind parameter's ``TypeEngine`` objects.
+
+ This method is only called by those dialects which require it,
+ currently cx_oracle, asyncpg and pg8000.
+
+ """
+ if self.isddl or self.is_text:
+ return
+
+ inputsizes = self.compiled._get_set_input_sizes_lookup(
+ include_types=self.include_set_input_sizes,
+ exclude_types=self.exclude_set_input_sizes,
+ )
+
+ if inputsizes is None:
+ return
+
+ if self.dialect._has_events:
+ inputsizes = dict(inputsizes)
+ self.dialect.dispatch.do_setinputsizes(
+ inputsizes, self.cursor, self.statement, self.parameters, self
+ )
+
+ has_escaped_names = bool(self.compiled.escaped_bind_names)
+ if has_escaped_names:
+ escaped_bind_names = self.compiled.escaped_bind_names
+
+ if self.dialect.positional:
+ items = [
+ (key, self.compiled.binds[key])
+ for key in self.compiled.positiontup
+ ]
+ else:
+ items = [
+ (key, bindparam)
+ for bindparam, key in self.compiled.bind_names.items()
+ ]
+
+ generic_inputsizes = []
+ for key, bindparam in items:
+ if bindparam in self.compiled.literal_execute_params:
+ continue
+
+ if key in self._expanded_parameters:
+ if bindparam.type._is_tuple_type:
+ num = len(bindparam.type.types)
+ dbtypes = inputsizes[bindparam]
+ generic_inputsizes.extend(
+ (
+ (
+ escaped_bind_names.get(paramname, paramname)
+ if has_escaped_names
+ else paramname
+ ),
+ dbtypes[idx % num],
+ bindparam.type.types[idx % num],
+ )
+ for idx, paramname in enumerate(
+ self._expanded_parameters[key]
+ )
+ )
+ else:
+ dbtype = inputsizes.get(bindparam, None)
+ generic_inputsizes.extend(
+ (
+ (
+ escaped_bind_names.get(paramname, paramname)
+ if has_escaped_names
+ else paramname
+ ),
+ dbtype,
+ bindparam.type,
+ )
+ for paramname in self._expanded_parameters[key]
+ )
+ else:
+ dbtype = inputsizes.get(bindparam, None)
+
+ escaped_name = (
+ escaped_bind_names.get(key, key)
+ if has_escaped_names
+ else key
+ )
+
+ generic_inputsizes.append(
+ (escaped_name, dbtype, bindparam.type)
+ )
+ try:
+ self.dialect.do_set_input_sizes(
+ self.cursor, generic_inputsizes, self
+ )
+ except BaseException as e:
+ self.root_connection._handle_dbapi_exception(
+ e, None, None, None, self
+ )
+
+ def _exec_default(self, column, default, type_):
+ if default.is_sequence:
+ return self.fire_sequence(default, type_)
+ elif default.is_callable:
+ self.current_column = column
+ return default.arg(self)
+ elif default.is_clause_element:
+ return self._exec_default_clause_element(column, default, type_)
+ else:
+ return default.arg
+
+ def _exec_default_clause_element(self, column, default, type_):
+ # execute a default that's a complete clause element. Here, we have
+ # to re-implement a miniature version of the compile->parameters->
+ # cursor.execute() sequence, since we don't want to modify the state
+ # of the connection / result in progress or create new connection/
+ # result objects etc.
+ # .. versionchanged:: 1.4
+
+ if not default._arg_is_typed:
+ default_arg = expression.type_coerce(default.arg, type_)
+ else:
+ default_arg = default.arg
+ compiled = expression.select(default_arg).compile(dialect=self.dialect)
+ compiled_params = compiled.construct_params()
+ processors = compiled._bind_processors
+ if compiled.positional:
+ positiontup = compiled.positiontup
+ parameters = self.dialect.execute_sequence_format(
+ [
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ for key in positiontup
+ ]
+ )
+ else:
+ parameters = dict(
+ (
+ key,
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key],
+ )
+ for key in compiled_params
+ )
+ return self._execute_scalar(
+ util.text_type(compiled), type_, parameters=parameters
+ )
+
+ current_parameters = None
+ """A dictionary of parameters applied to the current row.
+
+ This attribute is only available in the context of a user-defined default
+ generation function, e.g. as described at :ref:`context_default_functions`.
+ It consists of a dictionary which includes entries for each column/value
+ pair that is to be part of the INSERT or UPDATE statement. The keys of the
+ dictionary will be the key value of each :class:`_schema.Column`,
+ which is usually
+ synonymous with the name.
+
+ Note that the :attr:`.DefaultExecutionContext.current_parameters` attribute
+ does not accommodate for the "multi-values" feature of the
+ :meth:`_expression.Insert.values` method. The
+ :meth:`.DefaultExecutionContext.get_current_parameters` method should be
+ preferred.
+
+ .. seealso::
+
+ :meth:`.DefaultExecutionContext.get_current_parameters`
+
+ :ref:`context_default_functions`
+
+ """
+
+ def get_current_parameters(self, isolate_multiinsert_groups=True):
+ """Return a dictionary of parameters applied to the current row.
+
+ This method can only be used in the context of a user-defined default
+ generation function, e.g. as described at
+ :ref:`context_default_functions`. When invoked, a dictionary is
+ returned which includes entries for each column/value pair that is part
+ of the INSERT or UPDATE statement. The keys of the dictionary will be
+ the key value of each :class:`_schema.Column`,
+ which is usually synonymous
+ with the name.
+
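+ E.g., a Python-side default function that derives its value from
+ another parameter in the same row; the ``counter`` column here is
+ a placeholder for illustration::
+
+ def my_default(context):
+ return context.get_current_parameters()["counter"] + 12
+
+ Column("counter_plus_twelve", Integer, default=my_default)
+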
+ :param isolate_multiinsert_groups=True: indicates that multi-valued
+ INSERT constructs created using :meth:`_expression.Insert.values`
+ should be
+ handled by returning only the subset of parameters that are local
+ to the current column default invocation. When ``False``, the
+ raw parameters of the statement are returned including the
+ naming convention used in the case of multi-valued INSERT.
+
+ .. versionadded:: 1.2 added
+ :meth:`.DefaultExecutionContext.get_current_parameters`
+ which provides more functionality over the existing
+ :attr:`.DefaultExecutionContext.current_parameters`
+ attribute.
+
+ .. seealso::
+
+ :attr:`.DefaultExecutionContext.current_parameters`
+
+ :ref:`context_default_functions`
+
+ """
+ try:
+ parameters = self.current_parameters
+ column = self.current_column
+ except AttributeError:
+ raise exc.InvalidRequestError(
+ "get_current_parameters() can only be invoked in the "
+ "context of a Python side column default function"
+ )
+
+ compile_state = self.compiled.compile_state
+ if (
+ isolate_multiinsert_groups
+ and self.isinsert
+ and compile_state._has_multi_parameters
+ ):
+ if column._is_multiparam_column:
+ index = column.index + 1
+ d = {column.original.key: parameters[column.key]}
+ else:
+ d = {column.key: parameters[column.key]}
+ index = 0
+ keys = compile_state._dict_parameters.keys()
+ d.update(
+ (key, parameters["%s_m%d" % (key, index)]) for key in keys
+ )
+ return d
+ else:
+ return parameters
+
+ def get_insert_default(self, column):
+ if column.default is None:
+ return None
+ else:
+ return self._exec_default(column, column.default, column.type)
+
+ def get_update_default(self, column):
+ if column.onupdate is None:
+ return None
+ else:
+ return self._exec_default(column, column.onupdate, column.type)
+
+ def _process_executemany_defaults(self):
+ key_getter = self.compiled._within_exec_param_key_getter
+
+ scalar_defaults = {}
+
+ insert_prefetch = self.compiled.insert_prefetch
+ update_prefetch = self.compiled.update_prefetch
+
+ # pre-determine scalar Python-side defaults
+ # to avoid many calls of get_insert_default()/
+ # get_update_default()
+ for c in insert_prefetch:
+ if c.default and not c.default.is_sequence and c.default.is_scalar:
+ scalar_defaults[c] = c.default.arg
+
+ for c in update_prefetch:
+ if c.onupdate and c.onupdate.is_scalar:
+ scalar_defaults[c] = c.onupdate.arg
+
+ for param in self.compiled_parameters:
+ self.current_parameters = param
+ for c in insert_prefetch:
+ if c in scalar_defaults:
+ val = scalar_defaults[c]
+ else:
+ val = self.get_insert_default(c)
+ if val is not None:
+ param[key_getter(c)] = val
+ for c in update_prefetch:
+ if c in scalar_defaults:
+ val = scalar_defaults[c]
+ else:
+ val = self.get_update_default(c)
+ if val is not None:
+ param[key_getter(c)] = val
+
+ del self.current_parameters
+
+ def _process_executesingle_defaults(self):
+ key_getter = self.compiled._within_exec_param_key_getter
+ self.current_parameters = (
+ compiled_parameters
+ ) = self.compiled_parameters[0]
+
+ for c in self.compiled.insert_prefetch:
+ if c.default and not c.default.is_sequence and c.default.is_scalar:
+ val = c.default.arg
+ else:
+ val = self.get_insert_default(c)
+
+ if val is not None:
+ compiled_parameters[key_getter(c)] = val
+
+ for c in self.compiled.update_prefetch:
+ val = self.get_update_default(c)
+
+ if val is not None:
+ compiled_parameters[key_getter(c)] = val
+ del self.current_parameters
+
+
+DefaultDialect.execution_ctx_cls = DefaultExecutionContext
diff --git a/lib/sqlalchemy/engine/events.py b/lib/sqlalchemy/engine/events.py
new file mode 100644
index 0000000..286c4d4
--- /dev/null
+++ b/lib/sqlalchemy/engine/events.py
@@ -0,0 +1,835 @@
+# sqlalchemy/engine/events.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+
+from .base import Engine
+from .interfaces import Connectable
+from .interfaces import Dialect
+from .. import event
+from .. import exc
+
+
+class ConnectionEvents(event.Events):
+ """Available events for :class:`.Connectable`, which includes
+ :class:`_engine.Connection` and :class:`_engine.Engine`.
+
+ The methods here define the name of an event as well as the names of
+ members that are passed to listener functions.
+
+ An event listener can be associated with any :class:`.Connectable`
+ class or instance, such as an :class:`_engine.Engine`, e.g.::
+
+ from sqlalchemy import event, create_engine
+
+ def before_cursor_execute(conn, cursor, statement, parameters, context,
+ executemany):
+ log.info("Received statement: %s", statement)
+
+ engine = create_engine('postgresql://scott:tiger@localhost/test')
+ event.listen(engine, "before_cursor_execute", before_cursor_execute)
+
+ or with a specific :class:`_engine.Connection`::
+
+ with engine.begin() as conn:
+ @event.listens_for(conn, 'before_cursor_execute')
+ def before_cursor_execute(conn, cursor, statement, parameters,
+ context, executemany):
+ log.info("Received statement: %s", statement)
+
+ When the methods are called with a `statement` parameter, such as in
+ :meth:`.after_cursor_execute` or :meth:`.before_cursor_execute`,
+ the statement is the exact SQL string that was prepared for transmission
+ to the DBAPI ``cursor`` in the connection's :class:`.Dialect`.
+
+ The :meth:`.before_execute` and :meth:`.before_cursor_execute`
+ events can also be established with the ``retval=True`` flag, which
+ allows modification of the statement and parameters to be sent
+ to the database. The :meth:`.before_cursor_execute` event is
+ particularly useful here to add ad-hoc string transformations, such
+ as comments, to all executions::
+
+ from sqlalchemy.engine import Engine
+ from sqlalchemy import event
+
+ @event.listens_for(Engine, "before_cursor_execute", retval=True)
+ def comment_sql_calls(conn, cursor, statement, parameters,
+ context, executemany):
+ statement = statement + " -- some comment"
+ return statement, parameters
+
+ .. note:: :class:`_events.ConnectionEvents` can be established on any
+ combination of :class:`_engine.Engine`, :class:`_engine.Connection`,
+ as well
+ as instances of each of those classes. Events across all
+ four scopes will fire off for a given instance of
+ :class:`_engine.Connection`. However, for performance reasons, the
+ :class:`_engine.Connection` object determines at instantiation time
+ whether or not its parent :class:`_engine.Engine` has event listeners
+ established. Event listeners added to the :class:`_engine.Engine`
+ class or to an instance of :class:`_engine.Engine`
+ *after* the instantiation
+ of a dependent :class:`_engine.Connection` instance will usually
+ *not* be available on that :class:`_engine.Connection` instance.
+ The newly
+ added listeners will instead take effect for
+ :class:`_engine.Connection`
+ instances created subsequent to those event listeners being
+ established on the parent :class:`_engine.Engine` class or instance.
+
+ :param retval=False: Applies to the :meth:`.before_execute` and
+ :meth:`.before_cursor_execute` events only. When True, the
+ user-defined event function must have a return value, which
+ is a tuple of parameters that replace the given statement
+ and parameters. See those methods for a description of
+ specific return arguments.
+
+ """
+
+ _target_class_doc = "SomeEngine"
+ _dispatch_target = Connectable
+
+ @classmethod
+ def _listen(cls, event_key, retval=False):
+ target, identifier, fn = (
+ event_key.dispatch_target,
+ event_key.identifier,
+ event_key._listen_fn,
+ )
+
+ target._has_events = True
+
+ if not retval:
+ if identifier == "before_execute":
+ orig_fn = fn
+
+ def wrap_before_execute(
+ conn, clauseelement, multiparams, params, execution_options
+ ):
+ orig_fn(
+ conn,
+ clauseelement,
+ multiparams,
+ params,
+ execution_options,
+ )
+ return clauseelement, multiparams, params
+
+ fn = wrap_before_execute
+ elif identifier == "before_cursor_execute":
+ orig_fn = fn
+
+ def wrap_before_cursor_execute(
+ conn, cursor, statement, parameters, context, executemany
+ ):
+ orig_fn(
+ conn,
+ cursor,
+ statement,
+ parameters,
+ context,
+ executemany,
+ )
+ return statement, parameters
+
+ fn = wrap_before_cursor_execute
+ elif retval and identifier not in (
+ "before_execute",
+ "before_cursor_execute",
+ "handle_error",
+ ):
+ raise exc.ArgumentError(
+ "Only the 'before_execute', "
+ "'before_cursor_execute' and 'handle_error' engine "
+ "event listeners accept the 'retval=True' "
+ "argument."
+ )
+ event_key.with_wrapper(fn).base_listen()
+
+ @event._legacy_signature(
+ "1.4",
+ ["conn", "clauseelement", "multiparams", "params"],
+ lambda conn, clauseelement, multiparams, params, execution_options: (
+ conn,
+ clauseelement,
+ multiparams,
+ params,
+ ),
+ )
+ def before_execute(
+ self, conn, clauseelement, multiparams, params, execution_options
+ ):
+ """Intercept high level execute() events, receiving uncompiled
+ SQL constructs and other objects prior to rendering into SQL.
+
+ This event is good for debugging SQL compilation issues as well
+ as early manipulation of the parameters being sent to the database,
+ as the parameter lists will be in a consistent format here.
+
+ This event can be optionally established with the ``retval=True``
+ flag. The ``clauseelement``, ``multiparams``, and ``params``
+ arguments should be returned as a three-tuple in this case::
+
+ @event.listens_for(Engine, "before_execute", retval=True)
+ def before_execute(conn, clauseelement, multiparams, params):
+ # do something with clauseelement, multiparams, params
+ return clauseelement, multiparams, params
+
+ :param conn: :class:`_engine.Connection` object
+ :param clauseelement: SQL expression construct, :class:`.Compiled`
+ instance, or string statement passed to
+ :meth:`_engine.Connection.execute`.
+ :param multiparams: Multiple parameter sets, a list of dictionaries.
+ :param params: Single parameter set, a single dictionary.
+ :param execution_options: dictionary of execution
+ options passed along with the statement, if any. This is a merge
+ of all options that will be used, including those of the statement,
+ the connection, and those passed in to the method itself for
+ the 2.0 style of execution.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :meth:`.before_cursor_execute`
+
+ """
+
+ @event._legacy_signature(
+ "1.4",
+ ["conn", "clauseelement", "multiparams", "params", "result"],
+ lambda conn, clauseelement, multiparams, params, execution_options, result: ( # noqa
+ conn,
+ clauseelement,
+ multiparams,
+ params,
+ result,
+ ),
+ )
+ def after_execute(
+ self,
+ conn,
+ clauseelement,
+ multiparams,
+ params,
+ execution_options,
+ result,
+ ):
+ """Intercept high level execute() events after execute.
+
+
+ :param conn: :class:`_engine.Connection` object
+ :param clauseelement: SQL expression construct, :class:`.Compiled`
+ instance, or string statement passed to
+ :meth:`_engine.Connection.execute`.
+ :param multiparams: Multiple parameter sets, a list of dictionaries.
+ :param params: Single parameter set, a single dictionary.
+ :param execution_options: dictionary of execution
+ options passed along with the statement, if any. This is a merge
+ of all options that will be used, including those of the statement,
+ the connection, and those passed in to the method itself for
+ the 2.0 style of execution.
+
+ .. versionadded:: 1.4
+
+ :param result: :class:`_engine.CursorResult` generated by the
+ execution.
+
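+ E.g., a minimal sketch of a listener that logs each completed
+ statement, assuming a configured ``log`` object::
+
+ @event.listens_for(Engine, "after_execute")
+ def after_execute(conn, clauseelement, multiparams, params,
+ execution_options, result):
+ log.info("Statement complete: %s", clauseelement)
+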
+ """
+
+ def before_cursor_execute(
+ self, conn, cursor, statement, parameters, context, executemany
+ ):
+ """Intercept low-level cursor execute() events before execution,
+ receiving the string SQL statement and DBAPI-specific parameter list to
+ be invoked against a cursor.
+
+ This event is a good choice for logging as well as late modifications
+ to the SQL string. It's less ideal for parameter modifications except
+ for those which are specific to a target backend.
+
+ This event can be optionally established with the ``retval=True``
+ flag. The ``statement`` and ``parameters`` arguments should be
+ returned as a two-tuple in this case::
+
+ @event.listens_for(Engine, "before_cursor_execute", retval=True)
+ def before_cursor_execute(conn, cursor, statement,
+ parameters, context, executemany):
+ # do something with statement, parameters
+ return statement, parameters
+
+ See the example at :class:`_events.ConnectionEvents`.
+
+ :param conn: :class:`_engine.Connection` object
+ :param cursor: DBAPI cursor object
+ :param statement: string SQL statement, as to be passed to the DBAPI
+ :param parameters: Dictionary, tuple, or list of parameters being
+ passed to the ``execute()`` or ``executemany()`` method of the
+ DBAPI ``cursor``. In some cases may be ``None``.
+ :param context: :class:`.ExecutionContext` object in use. May
+ be ``None``.
+ :param executemany: boolean, if ``True``, this is an ``executemany()``
+ call, if ``False``, this is an ``execute()`` call.
+
+ .. seealso::
+
+ :meth:`.before_execute`
+
+ :meth:`.after_cursor_execute`
+
+ """
+
+ def after_cursor_execute(
+ self, conn, cursor, statement, parameters, context, executemany
+ ):
+ """Intercept low-level cursor execute() events after execution.
+
+ :param conn: :class:`_engine.Connection` object
+ :param cursor: DBAPI cursor object. Will have results pending
+ if the statement was a SELECT, but these should not be consumed
+ as they will be needed by the :class:`_engine.CursorResult`.
+ :param statement: string SQL statement, as passed to the DBAPI
+ :param parameters: Dictionary, tuple, or list of parameters being
+ passed to the ``execute()`` or ``executemany()`` method of the
+ DBAPI ``cursor``. In some cases may be ``None``.
+ :param context: :class:`.ExecutionContext` object in use. May
+ be ``None``.
+ :param executemany: boolean, if ``True``, this is an ``executemany()``
+ call, if ``False``, this is an ``execute()`` call.
+
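+ A common use pairs this event with :meth:`.before_cursor_execute`
+ to log total execution time; a minimal sketch, assuming ``time``
+ and ``log`` are available::
+
+ @event.listens_for(Engine, "before_cursor_execute")
+ def before_cursor_execute(conn, cursor, statement,
+ parameters, context, executemany):
+ conn.info.setdefault(
+ "query_start_time", []).append(time.time())
+
+ @event.listens_for(Engine, "after_cursor_execute")
+ def after_cursor_execute(conn, cursor, statement,
+ parameters, context, executemany):
+ total = time.time() - conn.info["query_start_time"].pop(-1)
+ log.debug("Query complete; total time: %f", total)
+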
+ """
+
+ def handle_error(self, exception_context):
+ r"""Intercept all exceptions processed by the
+ :class:`_engine.Connection`.
+
+ This includes all exceptions emitted by the DBAPI as well as
+ within SQLAlchemy's statement invocation process, including
+ encoding errors and other statement validation errors. Other areas
+ in which the event is invoked include transaction begin and end,
+ result row fetching, cursor creation.
+
+ Note that :meth:`.handle_error` may support new kinds of exceptions
+ and new calling scenarios at *any time*. Code which uses this
+ event must expect new calling patterns to be present in minor
+ releases.
+
+ To support the wide variety of members that correspond to an exception,
+ as well as to allow extensibility of the event without backwards
+ incompatibility, the sole argument received is an instance of
+ :class:`.ExceptionContext`. This object contains data members
+ representing detail about the exception.
+
+ Use cases supported by this hook include:
+
+ * read-only, low-level exception handling for logging and
+ debugging purposes
+ * exception re-writing
+ * Establishing or disabling whether a connection or the owning
+ connection pool is invalidated or expired in response to a
+ specific exception [1]_.
+
+ The hook is called while the cursor from the failed operation
+ (if any) is still open and accessible. Special cleanup operations
+ can be called on this cursor; SQLAlchemy will attempt to close
+ this cursor subsequent to this hook being invoked. If the connection
+ is in "autocommit" mode, the transaction also remains open within
+ the scope of this hook; the rollback of the per-statement transaction
+ also occurs after the hook is called.
+
+ .. note::
+
+ .. [1] The pool "pre_ping" handler enabled using the
+ :paramref:`_sa.create_engine.pool_pre_ping` parameter does
+ **not** consult this event before deciding if the "ping"
+ returned false, as opposed to receiving an unhandled error.
+ For this use case, the :ref:`legacy recipe based on
+ engine_connect() may be used
+ <pool_disconnects_pessimistic_custom>`. A future API will allow
+ more comprehensive customization of the "disconnect"
+ detection mechanism across all functions.
+
+ A handler function has two options for replacing
+ the SQLAlchemy-constructed exception into one that is user
+ defined. It can either raise this new exception directly, in
+ which case all further event listeners are bypassed and the
+ exception will be raised, after appropriate cleanup has taken
+ place::
+
+ @event.listens_for(Engine, "handle_error")
+ def handle_exception(context):
+ if isinstance(context.original_exception,
+ psycopg2.OperationalError) and \
+ "failed" in str(context.original_exception):
+ raise MySpecialException("failed operation")
+
+ .. warning:: Because the
+ :meth:`_events.ConnectionEvents.handle_error`
+ event specifically provides for exceptions to be re-thrown as
+ the ultimate exception raised by the failed statement,
+ **stack traces will be misleading** if the user-defined event
+ handler itself fails and throws an unexpected exception;
+ the stack trace may not illustrate the actual code line that
+ failed! It is advised to code carefully here and use
+ logging and/or inline debugging if unexpected exceptions are
+ occurring.
+
+ Alternatively, a "chained" style of event handling can be
+ used, by configuring the handler with the ``retval=True``
+ modifier and returning the new exception instance from the
+ function. In this case, event handling will continue onto the
+ next handler. The "chained" exception is available using
+ :attr:`.ExceptionContext.chained_exception`::
+
+ @event.listens_for(Engine, "handle_error", retval=True)
+ def handle_exception(context):
+ if context.chained_exception is not None and \
+ "special" in context.chained_exception.message:
+ return MySpecialException("failed",
+ cause=context.chained_exception)
+
+ Handlers that return ``None`` may be used within the chain; when
+ a handler returns ``None``, the previous exception instance,
+ if any, is maintained as the current exception that is passed onto the
+ next handler.
+
+ When a custom exception is raised or returned, SQLAlchemy raises
+ this new exception as-is, it is not wrapped by any SQLAlchemy
+ object. If the exception is not a subclass of
+ :class:`sqlalchemy.exc.StatementError`,
+ certain features may not be available; currently this includes
+ the ORM's feature of adding a detail hint about "autoflush" to
+ exceptions raised within the autoflush process.
+
+ :param context: an :class:`.ExceptionContext` object. See this
+ class for details on all available members.
+
+ .. versionadded:: 0.9.7 Added the
+ :meth:`_events.ConnectionEvents.handle_error` hook.
+
+ .. versionchanged:: 1.1 The :meth:`.handle_error` event will now
+ receive all exceptions that inherit from ``BaseException``,
+ including ``SystemExit`` and ``KeyboardInterrupt``. The setting for
+ :attr:`.ExceptionContext.is_disconnect` is ``True`` in this case and
+ the default for
+ :attr:`.ExceptionContext.invalidate_pool_on_disconnect` is
+ ``False``.
+
+ .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now
+ invoked when an :class:`_engine.Engine` fails during the initial
+ call to :meth:`_engine.Engine.connect`, as well as when a
+ :class:`_engine.Connection` object encounters an error during a
+ reconnect operation.
+
+ .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is
+ not fired off when a dialect makes use of the
+ ``skip_user_error_events`` execution option. This is used
+ by dialects which intend to catch SQLAlchemy-specific exceptions
+ within specific operations, such as when the MySQL dialect detects
+ a table not present within the ``has_table()`` dialect method.
+ Prior to 1.0.0, code which implements :meth:`.handle_error` needs
+ to ensure that exceptions thrown in these scenarios are re-raised
+ without modification.
+
+ """
+
+ def engine_connect(self, conn, branch):
+ """Intercept the creation of a new :class:`_engine.Connection`.
+
+ This event is called typically as the direct result of calling
+ the :meth:`_engine.Engine.connect` method.
+
+ It differs from the :meth:`_events.PoolEvents.connect` method, which
+ refers to the actual connection to a database at the DBAPI level;
+ a DBAPI connection may be pooled and reused for many operations.
+ In contrast, this event refers only to the production of a higher level
+ :class:`_engine.Connection` wrapper around such a DBAPI connection.
+
+ It also differs from the :meth:`_events.PoolEvents.checkout` event
+ in that it is specific to the :class:`_engine.Connection` object,
+ not the
+ DBAPI connection that :meth:`_events.PoolEvents.checkout` deals with,
+ although
+ this DBAPI connection is available here via the
+ :attr:`_engine.Connection.connection` attribute.
+ But note there can in fact
+ be multiple :meth:`_events.PoolEvents.checkout`
+ events within the lifespan
+ of a single :class:`_engine.Connection` object, if that
+ :class:`_engine.Connection`
+ is invalidated and re-established. There can also be multiple
+ :class:`_engine.Connection`
+ objects generated for the same already-checked-out
+ DBAPI connection, in the case that a "branch" of a
+ :class:`_engine.Connection`
+ is produced.
+
+ :param conn: :class:`_engine.Connection` object.
+ :param branch: if True, this is a "branch" of an existing
+ :class:`_engine.Connection`. A branch is generated within the course
+ of a statement execution to invoke supplemental statements, most
+ typically to pre-execute a SELECT of a default value for the purposes
+ of an INSERT statement.
+
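+ A listener will typically want to skip "branched" connections,
+ e.g. as a minimal sketch::
+
+ @event.listens_for(Engine, "engine_connect")
+ def receive_engine_connect(conn, branch):
+ if branch:
+ # branched connections share the DBAPI connection
+ # of their parent; skip per-connection setup
+ return
+ # ... perform per-Connection setup here ...
+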
+ .. seealso::
+
+ :meth:`_events.PoolEvents.checkout`
+ the lower-level pool checkout event
+ for an individual DBAPI connection
+
+ """
+
+ def set_connection_execution_options(self, conn, opts):
+ """Intercept when the :meth:`_engine.Connection.execution_options`
+ method is called.
+
+ This method is called after the new :class:`_engine.Connection`
+ has been
+ produced, with the newly updated execution options collection, but
+ before the :class:`.Dialect` has acted upon any of those new options.
+
+ Note that this method is not called when a new
+ :class:`_engine.Connection`
+ is produced which is inheriting execution options from its parent
+ :class:`_engine.Engine`; to intercept this condition, use the
+ :meth:`_events.ConnectionEvents.engine_connect` event.
+
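+ E.g., a minimal sketch of a listener reporting when an isolation
+ level option is set, assuming a configured ``log`` object::
+
+ @event.listens_for(Engine, "set_connection_execution_options")
+ def receive_options(conn, opts):
+ if "isolation_level" in opts:
+ log.info(
+ "isolation level set to %s",
+ opts["isolation_level"])
+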
+ :param conn: The newly copied :class:`_engine.Connection` object
+
+ :param opts: dictionary of options that were passed to the
+ :meth:`_engine.Connection.execution_options` method.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`_events.ConnectionEvents.set_engine_execution_options`
+ - event
+ which is called when :meth:`_engine.Engine.execution_options`
+ is called.
+
+
+ """
+
+ def set_engine_execution_options(self, engine, opts):
+ """Intercept when the :meth:`_engine.Engine.execution_options`
+ method is called.
+
+ The :meth:`_engine.Engine.execution_options` method produces a shallow
+ copy of the :class:`_engine.Engine` which stores the new options.
+ That new
+ :class:`_engine.Engine` is passed here.
+ A particular application of this
+ method is to add a :meth:`_events.ConnectionEvents.engine_connect`
+ event
+ handler to the given :class:`_engine.Engine`
+ which will perform some per-
+ :class:`_engine.Connection` task specific to these execution options.
+
+ :param engine: The newly copied :class:`_engine.Engine` object
+
+ :param opts: dictionary of options that were passed to the
+ :meth:`_engine.Connection.execution_options` method.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`_events.ConnectionEvents.set_connection_execution_options`
+ - event
+ which is called when :meth:`_engine.Connection.execution_options`
+ is
+ called.
+
+ """
+
+ def engine_disposed(self, engine):
+ """Intercept when the :meth:`_engine.Engine.dispose` method is called.
+
+ The :meth:`_engine.Engine.dispose` method instructs the engine to
+ "dispose" of it's connection pool (e.g. :class:`_pool.Pool`), and
+ replaces it with a new one. Disposing of the old pool has the
+ effect that existing checked-in connections are closed. The new
+ pool does not establish any new connections until it is first used.
+
+ This event can be used to indicate that resources related to the
+ :class:`_engine.Engine` should also be cleaned up,
+ keeping in mind that the
+ :class:`_engine.Engine`
+ can still be used for new requests in which case
+ it re-acquires connection resources.
+
+ .. versionadded:: 1.0.5
+
+ """
+
+ def begin(self, conn):
+ """Intercept begin() events.
+
+ :param conn: :class:`_engine.Connection` object
+
+ """
+
+ def rollback(self, conn):
+ """Intercept rollback() events, as initiated by a
+ :class:`.Transaction`.
+
+ Note that the :class:`_pool.Pool` also "auto-rolls back"
+ a DBAPI connection upon checkin, if the ``reset_on_return``
+ flag is set to its default value of ``'rollback'``.
+ To intercept this
+ rollback, use the :meth:`_events.PoolEvents.reset` hook.
+
+ :param conn: :class:`_engine.Connection` object
+
+ .. seealso::
+
+ :meth:`_events.PoolEvents.reset`
+
+ """
+
+ def commit(self, conn):
+ """Intercept commit() events, as initiated by a
+ :class:`.Transaction`.
+
+ Note that the :class:`_pool.Pool` may also "auto-commit"
+ a DBAPI connection upon checkin, if the ``reset_on_return``
+ flag is set to the value ``'commit'``. To intercept this
+ commit, use the :meth:`_events.PoolEvents.reset` hook.
+
+ :param conn: :class:`_engine.Connection` object
+ """
+
+ def savepoint(self, conn, name):
+ """Intercept savepoint() events.
+
+ :param conn: :class:`_engine.Connection` object
+ :param name: specified name used for the savepoint.
+
+ """
+
+ def rollback_savepoint(self, conn, name, context):
+ """Intercept rollback_savepoint() events.
+
+ :param conn: :class:`_engine.Connection` object
+ :param name: specified name used for the savepoint.
+ :param context: not used
+
+ """
+ # TODO: deprecate "context"
+
+ def release_savepoint(self, conn, name, context):
+ """Intercept release_savepoint() events.
+
+ :param conn: :class:`_engine.Connection` object
+ :param name: specified name used for the savepoint.
+ :param context: not used
+
+ """
+ # TODO: deprecate "context"
+
+ def begin_twophase(self, conn, xid):
+ """Intercept begin_twophase() events.
+
+ :param conn: :class:`_engine.Connection` object
+ :param xid: two-phase XID identifier
+
+ """
+
+ def prepare_twophase(self, conn, xid):
+ """Intercept prepare_twophase() events.
+
+ :param conn: :class:`_engine.Connection` object
+ :param xid: two-phase XID identifier
+ """
+
+ def rollback_twophase(self, conn, xid, is_prepared):
+ """Intercept rollback_twophase() events.
+
+ :param conn: :class:`_engine.Connection` object
+ :param xid: two-phase XID identifier
+ :param is_prepared: boolean, indicates if
+ :meth:`.TwoPhaseTransaction.prepare` was called.
+
+ """
+
+ def commit_twophase(self, conn, xid, is_prepared):
+ """Intercept commit_twophase() events.
+
+ :param conn: :class:`_engine.Connection` object
+ :param xid: two-phase XID identifier
+ :param is_prepared: boolean, indicates if
+ :meth:`.TwoPhaseTransaction.prepare` was called.
+
+ """
+
+
+class DialectEvents(event.Events):
+ """event interface for execution-replacement functions.
+
+ These events allow direct instrumentation and replacement
+ of key dialect functions which interact with the DBAPI.
+
+ .. note::
+
+ :class:`.DialectEvents` hooks should be considered **semi-public**
+ and experimental.
+ These hooks are not for general use and are only for those situations
+ where intricate re-statement of DBAPI mechanics must be injected onto
+ an existing dialect. For general-use statement-interception events,
+ please use the :class:`_events.ConnectionEvents` interface.
+
+ .. seealso::
+
+ :meth:`_events.ConnectionEvents.before_cursor_execute`
+
+ :meth:`_events.ConnectionEvents.before_execute`
+
+ :meth:`_events.ConnectionEvents.after_cursor_execute`
+
+ :meth:`_events.ConnectionEvents.after_execute`
+
+
+ .. versionadded:: 0.9.4
+
+ """
+
+ _target_class_doc = "SomeEngine"
+ _dispatch_target = Dialect
+
+ @classmethod
+ def _listen(cls, event_key, retval=False):
+ target = event_key.dispatch_target
+
+ target._has_events = True
+ event_key.base_listen()
+
+ @classmethod
+ def _accept_with(cls, target):
+ if isinstance(target, type):
+ if issubclass(target, Engine):
+ return Dialect
+ elif issubclass(target, Dialect):
+ return target
+ elif isinstance(target, Engine):
+ return target.dialect
+ elif isinstance(target, Dialect):
+ return target
+ elif hasattr(target, "dispatch") and hasattr(
+ target.dispatch._events, "_no_async_engine_events"
+ ):
+ target.dispatch._events._no_async_engine_events()
+ else:
+ return None
+
+ def do_connect(self, dialect, conn_rec, cargs, cparams):
+ """Receive connection arguments before a connection is made.
+
+ This event is useful in that it allows the handler to manipulate the
+ cargs and/or cparams collections that control how the DBAPI
+ ``connect()`` function will be called. ``cargs`` will always be a
+ Python list that can be mutated in-place, and ``cparams`` a Python
+ dictionary that may also be mutated::
+
+ e = create_engine("postgresql+psycopg2://user@host/dbname")
+
+ @event.listens_for(e, 'do_connect')
+ def receive_do_connect(dialect, conn_rec, cargs, cparams):
+ cparams["password"] = "some_password"
+
+ The event hook may also be used to override the call to ``connect()``
+ entirely, by returning a non-``None`` DBAPI connection object::
+
+ e = create_engine("postgresql+psycopg2://user@host/dbname")
+
+ @event.listens_for(e, 'do_connect')
+ def receive_do_connect(dialect, conn_rec, cargs, cparams):
+ return psycopg2.connect(*cargs, **cparams)
+
+
+ .. versionadded:: 1.0.3
+
+ .. seealso::
+
+ :ref:`custom_dbapi_args`
+
+ """
+
+ def do_executemany(self, cursor, statement, parameters, context):
+ """Receive a cursor to have executemany() called.
+
+ Return the value True to halt further events from invoking,
+ and to indicate that the cursor execution has already taken
+ place within the event handler.
+
+ """
+
+ def do_execute_no_params(self, cursor, statement, context):
+ """Receive a cursor to have execute() with no parameters called.
+
+ Return the value True to halt further events from invoking,
+ and to indicate that the cursor execution has already taken
+ place within the event handler.
+
+ """
+
+ def do_execute(self, cursor, statement, parameters, context):
+ """Receive a cursor to have execute() called.
+
+ Return the value True to halt further events from invoking,
+ and to indicate that the cursor execution has already taken
+ place within the event handler.
+
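+ E.g., a hook that invokes the cursor itself and returns ``True``
+ so that the dialect's own execution is skipped; a minimal
+ sketch::
+
+ @event.listens_for(Engine, "do_execute")
+ def receive_do_execute(cursor, statement, parameters, context):
+ cursor.execute(statement, parameters)
+ return True
+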
+ """
+
+ def do_setinputsizes(
+ self, inputsizes, cursor, statement, parameters, context
+ ):
+ """Receive the setinputsizes dictionary for possible modification.
+
+ This event is emitted in the case where the dialect makes use of the
+ DBAPI ``cursor.setinputsizes()`` method which passes information about
+ parameter binding for a particular statement. The given
+ ``inputsizes`` dictionary will contain :class:`.BindParameter` objects
+ as keys, linked to DBAPI-specific type objects as values; for
+ parameters that are not bound, they are added to the dictionary with
+ ``None`` as the value, which means the parameter will not be included
+ in the ultimate setinputsizes call. The event may be used to inspect
+ and/or log the datatypes that are being bound, as well as to modify the
+ dictionary in place. Parameters can be added, modified, or removed
+ from this dictionary. Callers will typically want to inspect the
+ :attr:`.BindParameter.type` attribute of the given bind objects in
+ order to make decisions about the DBAPI object.
+
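+ E.g., a sketch of a hook that removes all entries, disabling the
+ setinputsizes call entirely for a given engine (``some_engine``
+ being an :class:`_engine.Engine` instance)::
+
+ @event.listens_for(some_engine, "do_setinputsizes")
+ def _remove_setinputsizes(
+ inputsizes, cursor, statement, parameters, context):
+ inputsizes.clear()
+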
+ After the event, the ``inputsizes`` dictionary is converted into
+ an appropriate datastructure to be passed to ``cursor.setinputsizes``;
+ either a list for a positional bound parameter execution style,
+ or a dictionary of string parameter keys to DBAPI type objects for
+ a named bound parameter execution style.
+
+ The setinputsizes hook overall is only used for dialects which include
+ the flag ``use_setinputsizes=True``. Dialects which use this
+ include cx_Oracle, pg8000, asyncpg, and pyodbc.
+
+ .. note::
+
+ For use with pyodbc, the ``use_setinputsizes`` flag
+ must be passed to the dialect, e.g.::
+
+ create_engine("mssql+pyodbc://...", use_setinputsizes=True)
+
+ .. seealso::
+
+ :ref:`mssql_pyodbc_setinputsizes`
+
+ .. versionadded:: 1.2.9
+
+ .. seealso::
+
+ :ref:`cx_oracle_setinputsizes`
+
+ """
+ pass
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
new file mode 100644
index 0000000..4f2524a
--- /dev/null
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -0,0 +1,1719 @@
+# engine/interfaces.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Define core interfaces used by the engine system."""
+
+from .. import util
+from ..sql.compiler import Compiled # noqa
+from ..sql.compiler import TypeCompiler # noqa
+from ..util.concurrency import await_only
+
+
+class Dialect(object):
+ """Define the behavior of a specific database and DB-API combination.
+
+ Any aspect of metadata definition, SQL query generation,
+ execution, result-set handling, or anything else which varies
+ between databases is defined under the general category of the
+ Dialect. The Dialect acts as a factory for other
+ database-specific object implementations including
+ ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
+
+ .. note:: Third party dialects should not subclass :class:`.Dialect`
+ directly. Instead, subclass :class:`.default.DefaultDialect` or
+ a descendant class.
+
+ All dialects include the following attributes. There are many other
+ attributes that may be supported as well:
+
+ ``name``
+ identifying name for the dialect from a DBAPI-neutral point of view
+ (e.g. 'sqlite')
+
+ ``driver``
+ identifying name for the dialect's DBAPI
+
+ ``positional``
+ True if the paramstyle for this Dialect is positional.
+
+ ``paramstyle``
+ the paramstyle to be used (some DB-APIs support multiple
+ paramstyles).
+
+ ``encoding``
+ type of encoding to use for unicode, usually defaults to
+ 'utf-8'.
+
+ ``statement_compiler``
+ a :class:`.Compiled` class used to compile SQL statements
+
+ ``ddl_compiler``
+ a :class:`.Compiled` class used to compile DDL statements
+
+ ``server_version_info``
+ a tuple containing a version number for the DB backend in use.
+ This value is only available for supporting dialects, and is
+ typically populated during the initial connection to the database.
+
+ ``default_schema_name``
+ the name of the default schema. This value is only available for
+ supporting dialects, and is typically populated during the
+ initial connection to the database.
+
+ ``execution_ctx_cls``
+ a :class:`.ExecutionContext` class used to handle statement execution
+
+ ``execute_sequence_format``
+ either the 'tuple' or 'list' type, depending on what cursor.execute()
+ accepts for the second argument (they vary).
+
+ ``preparer``
+ a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
+ quote identifiers.
+
+ ``supports_alter``
+ ``True`` if the database supports ``ALTER TABLE`` - used only for
+ generating foreign key constraints in certain circumstances
+
+ ``max_identifier_length``
+ The maximum length of identifier names.
+
+ ``supports_sane_rowcount``
+ Indicate whether the dialect properly implements rowcount for
+ ``UPDATE`` and ``DELETE`` statements.
+
+ ``supports_sane_multi_rowcount``
+ Indicate whether the dialect properly implements rowcount for
+ ``UPDATE`` and ``DELETE`` statements when executed via
+ executemany.
+
+ ``preexecute_autoincrement_sequences``
+ True if 'implicit' primary key functions must be executed separately
+ in order to get their value. This is currently oriented towards
+ PostgreSQL.
+
+ ``implicit_returning``
+ use RETURNING or equivalent during INSERT execution in order to load
+ newly generated primary keys and other column defaults in one execution,
+ which are then available via inserted_primary_key.
+ If an insert statement has returning() specified explicitly,
+ the "implicit" functionality is not used and inserted_primary_key
+ will not be available.
+
+ ``colspecs``
+ A dictionary of TypeEngine classes from sqlalchemy.types mapped
+ to subclasses that are specific to the dialect class. This
+ dictionary is class-level only and is not accessed from the
+ dialect instance itself.
+
+ ``supports_default_values``
+ Indicates if the construct ``INSERT INTO tablename DEFAULT
+ VALUES`` is supported
+
+ ``supports_sequences``
+ Indicates if the dialect supports CREATE SEQUENCE or similar.
+
+ ``sequences_optional``
+ If True, indicates if the "optional" flag on the Sequence() construct
+ should signal to not generate a CREATE SEQUENCE. Applies only to
+ dialects that support sequences. Currently used only to allow PostgreSQL
+ SERIAL to be used on a column that specifies Sequence() for usage on
+ other backends.
+
+ ``supports_native_enum``
+ Indicates if the dialect supports a native ENUM construct.
+ This will prevent types.Enum from generating a CHECK
+ constraint when that type is used.
+
+ ``supports_native_boolean``
+ Indicates if the dialect supports a native boolean construct.
+ This will prevent types.Boolean from generating a CHECK
+ constraint when that type is used.
+
+ ``dbapi_exception_translation_map``
+ A dictionary of names that will contain as values the names of
+ pep-249 exceptions ("IntegrityError", "OperationalError", etc)
+ keyed to alternate class names, to support the case where a
+ DBAPI has exception classes that aren't named as they are
+ referred to (e.g. IntegrityError = MyException). In the vast
+ majority of cases this dictionary is empty.
+
+ .. versionadded:: 1.0.5
+
+ """
+
+ _has_events = False
+
+ supports_statement_cache = True
+ """indicates if this dialect supports caching.
+
+ All dialects that are compatible with statement caching should set this
+ flag to True directly on each dialect class and subclass that supports
+ it. SQLAlchemy tests that this flag is locally present on each dialect
+ subclass before it will use statement caching. This is to provide
+ safety for legacy or new dialects that are not yet fully tested to be
+ compliant with SQL statement caching.
+
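+ E.g., a third-party dialect opting in explicitly::
+
+ class MyDialect(DefaultDialect):
+ supports_statement_cache = True
+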
+ .. versionadded:: 1.4.5
+
+ .. seealso::
+
+ :ref:`engine_thirdparty_caching`
+
+ """
+
+ def create_connect_args(self, url):
+ """Build DB-API compatible connection arguments.
+
+ Given a :class:`.URL` object, returns a tuple
+ consisting of a ``(*args, **kwargs)`` suitable to send directly
+ to the dbapi's connect function. The arguments are sent to the
+ :meth:`.Dialect.connect` method which then runs the DBAPI-level
+ ``connect()`` function.
+
+ The method typically makes use of the
+ :meth:`.URL.translate_connect_args`
+ method in order to generate a dictionary of options.
+
+ The default implementation is::
+
+ def create_connect_args(self, url):
+ opts = url.translate_connect_args()
+ opts.update(url.query)
+ return [[], opts]
+
+ :param url: a :class:`.URL` object
+
+ :return: a tuple of ``(*args, **kwargs)`` which will be passed to the
+ :meth:`.Dialect.connect` method.
+
+ .. seealso::
+
+ :meth:`.URL.translate_connect_args`
+
+ """
+
+ raise NotImplementedError()
+
+ @classmethod
+ def type_descriptor(cls, typeobj):
+ """Transform a generic type to a dialect-specific type.
+
+ Dialect classes will usually use the
+ :func:`_types.adapt_type` function in the types module to
+ accomplish this.
+
+ The returned result is cached *per dialect class* so can
+ contain no dialect-instance state.
+
+ """
+
+ raise NotImplementedError()
+
+ def initialize(self, connection):
+ """Called during strategized creation of the dialect with a
+ connection.
+
+ Allows dialects to configure options based on server version info or
+ other properties.
+
+ The connection passed here is a SQLAlchemy Connection object,
+ with full capabilities.
+
+ The initialize() method of the base dialect should be called via
+ super().
+
+ .. note:: as of SQLAlchemy 1.4, this method is called **before**
+ any :meth:`_engine.Dialect.on_connect` hooks are called.
+
+ """
+
+ pass
+
+ def get_columns(self, connection, table_name, schema=None, **kw):
+ """Return information about columns in `table_name`.
+
+ Given a :class:`_engine.Connection`, a string
+ `table_name`, and an optional string `schema`, return column
+ information as a list of dictionaries with these keys:
+
+ name
+ the column's name
+
+ type
+ [sqlalchemy.types#TypeEngine]
+
+ nullable
+ boolean
+
+ default
+ the column's default value
+
+ autoincrement
+ boolean
+
+ sequence
+ a dictionary of the form
+ {'name' : str, 'start' :int, 'increment': int, 'minvalue': int,
+ 'maxvalue': int, 'nominvalue': bool, 'nomaxvalue': bool,
+ 'cycle': bool, 'cache': int, 'order': bool}
+
+ Additional column attributes may be present.
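+
+ These dictionaries are also what the public-facing
+ :meth:`.Inspector.get_columns` method returns, e.g. (with
+ ``some_engine`` and ``some_table`` as placeholders)::
+
+ from sqlalchemy import inspect
+
+ insp = inspect(some_engine)
+ for col in insp.get_columns("some_table"):
+ print(col["name"], col["type"], col["nullable"])
+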
+ """
+
+ raise NotImplementedError()
+
+ def get_pk_constraint(self, connection, table_name, schema=None, **kw):
+ """Return information about the primary key constraint on
+ `table_name`.
+
+ Given a :class:`_engine.Connection`, a string
+ `table_name`, and an optional string `schema`, return primary
+ key information as a dictionary with these keys:
+
+ constrained_columns
+ a list of column names that make up the primary key
+
+ name
+ optional name of the primary key constraint.
+
+ """
+ raise NotImplementedError()
+
+ def get_foreign_keys(self, connection, table_name, schema=None, **kw):
+ """Return information about foreign_keys in `table_name`.
+
+ Given a :class:`_engine.Connection`, a string
+ `table_name`, and an optional string `schema`, return foreign
+ key information as a list of dicts with these keys:
+
+ name
+ the constraint's name
+
+ constrained_columns
+ a list of column names that make up the foreign key
+
+ referred_schema
+ the name of the referred schema
+
+ referred_table
+ the name of the referred table
+
+ referred_columns
+ a list of column names in the referred table that correspond to
+ constrained_columns
+ """
+
+ raise NotImplementedError()
+
+ def get_table_names(self, connection, schema=None, **kw):
+ """Return a list of table names for `schema`."""
+
+ raise NotImplementedError()
+
+ def get_temp_table_names(self, connection, schema=None, **kw):
+ """Return a list of temporary table names on the given connection,
+ if supported by the underlying backend.
+
+ """
+
+ raise NotImplementedError()
+
+ def get_view_names(self, connection, schema=None, **kw):
+ """Return a list of all view names available in the database.
+
+ :param schema: schema name to query, if not the default schema.
+ """
+
+ raise NotImplementedError()
+
+ def get_sequence_names(self, connection, schema=None, **kw):
+ """Return a list of all sequence names available in the database.
+
+ :param schema: schema name to query, if not the default schema.
+
+ .. versionadded:: 1.4
+ """
+
+ raise NotImplementedError()
+
+ def get_temp_view_names(self, connection, schema=None, **kw):
+ """Return a list of temporary view names on the given connection,
+ if supported by the underlying backend.
+
+ """
+
+ raise NotImplementedError()
+
+ def get_view_definition(self, connection, view_name, schema=None, **kw):
+ """Return view definition.
+
+ Given a :class:`_engine.Connection`, a string
+ `view_name`, and an optional string `schema`, return the view
+ definition.
+ """
+
+ raise NotImplementedError()
+
+ def get_indexes(self, connection, table_name, schema=None, **kw):
+ """Return information about indexes in `table_name`.
+
+ Given a :class:`_engine.Connection`, a string
+ `table_name` and an optional string `schema`, return index
+ information as a list of dictionaries with these keys:
+
+ name
+ the index's name
+
+ column_names
+ list of column names in order
+
+ unique
+ boolean
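+
+        E.g., a hypothetical result, with names illustrative::
+
+            [
+                {
+                    "name": "ix_user_email",
+                    "column_names": ["email"],
+                    "unique": True,
+                }
+            ]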
+ """
+
+ raise NotImplementedError()
+
+ def get_unique_constraints(
+ self, connection, table_name, schema=None, **kw
+ ):
+ r"""Return information about unique constraints in `table_name`.
+
+ Given a string `table_name` and an optional string `schema`, return
+ unique constraint information as a list of dicts with these keys:
+
+ name
+ the unique constraint's name
+
+ column_names
+ list of column names in order
+
+ \**kw
+ other options passed to the dialect's get_unique_constraints()
+ method.
+
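+        E.g., a hypothetical result, with names illustrative::
+
+            [{"name": "uq_user_email", "column_names": ["email"]}]
+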
+ .. versionadded:: 0.9.0
+
+ """
+
+ raise NotImplementedError()
+
+ def get_check_constraints(self, connection, table_name, schema=None, **kw):
+ r"""Return information about check constraints in `table_name`.
+
+ Given a string `table_name` and an optional string `schema`, return
+ check constraint information as a list of dicts with these keys:
+
+ * ``name`` -
+ the check constraint's name
+
+ * ``sqltext`` -
+ the check constraint's SQL expression
+
+ * ``**kw`` -
+ other options passed to the dialect's get_check_constraints()
+ method.
+
+ .. versionadded:: 1.1.0
+
+ """
+
+ raise NotImplementedError()
+
+ def get_table_comment(self, connection, table_name, schema=None, **kw):
+ r"""Return the "comment" for the table identified by `table_name`.
+
+ Given a string `table_name` and an optional string `schema`, return
+ table comment information as a dictionary with this key:
+
+ text
+ text of the comment
+
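+        E.g., a hypothetical return value::
+
+            {"text": "user account table"}
+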
+ Raises ``NotImplementedError`` for dialects that don't support
+ comments.
+
+ .. versionadded:: 1.2
+
+ """
+
+ raise NotImplementedError()
+
+ def normalize_name(self, name):
+ """convert the given name to lowercase if it is detected as
+ case insensitive.
+
+ This method is only used if the dialect defines
+ requires_name_normalize=True.
+
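+        A simplified, hypothetical sketch for a backend whose
+        case-insensitive names default to uppercase (real dialects also
+        account for quoting rules)::
+
+            def normalize_name(self, name):
+                if name.upper() == name:
+                    return name.lower()
+                return name
+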
+ """
+ raise NotImplementedError()
+
+ def denormalize_name(self, name):
+ """convert the given name to a case insensitive identifier
+ for the backend if it is an all-lowercase name.
+
+ This method is only used if the dialect defines
+ requires_name_normalize=True.
+
+ """
+ raise NotImplementedError()
+
+ def has_table(self, connection, table_name, schema=None, **kw):
+ """For internal dialect use, check the existence of a particular table
+ in the database.
+
+ Given a :class:`_engine.Connection` object, a string table_name and
+ optional schema name, return True if the given table exists in the
+ database, False otherwise.
+
+ This method serves as the underlying implementation of the
+ public facing :meth:`.Inspector.has_table` method, and is also used
+ internally to implement the "checkfirst" behavior for methods like
+ :meth:`_schema.Table.create` and :meth:`_schema.MetaData.create_all`.
+
+ .. note:: This method is used internally by SQLAlchemy, and is
+ published so that third-party dialects may provide an
+ implementation. It is **not** the public API for checking for table
+ presence. Please use the :meth:`.Inspector.has_table` method.
+ Alternatively, for legacy cross-compatibility, the
+ :meth:`_engine.Engine.has_table` method may be used.
+
+ """
+
+ raise NotImplementedError()
+
+ def has_index(self, connection, table_name, index_name, schema=None):
+ """Check the existence of a particular index name in the database.
+
+        Given a :class:`_engine.Connection` object, a string
+        `table_name` and a string `index_name`, return True if an index of
+        the given name on the given table exists, False otherwise.
+
+ The :class:`.DefaultDialect` implements this in terms of the
+ :meth:`.Dialect.has_table` and :meth:`.Dialect.get_indexes` methods,
+ however dialects can implement a more performant version.
+
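+        The generic approach used by the default implementation is
+        roughly::
+
+            def has_index(
+                self, connection, table_name, index_name, schema=None
+            ):
+                if not self.has_table(connection, table_name, schema=schema):
+                    return False
+                return any(
+                    ix["name"] == index_name
+                    for ix in self.get_indexes(
+                        connection, table_name, schema=schema
+                    )
+                )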
+
+ .. versionadded:: 1.4
+
+ """
+
+ raise NotImplementedError()
+
+ def has_sequence(self, connection, sequence_name, schema=None, **kw):
+ """Check the existence of a particular sequence in the database.
+
+ Given a :class:`_engine.Connection` object and a string
+ `sequence_name`, return True if the given sequence exists in
+ the database, False otherwise.
+ """
+
+ raise NotImplementedError()
+
+ def _get_server_version_info(self, connection):
+ """Retrieve the server version info from the given connection.
+
+ This is used by the default implementation to populate the
+ "server_version_info" attribute and is called exactly
+ once upon first connect.
+
+ """
+
+ raise NotImplementedError()
+
+ def _get_default_schema_name(self, connection):
+ """Return the string name of the currently selected schema from
+ the given connection.
+
+ This is used by the default implementation to populate the
+ "default_schema_name" attribute and is called exactly
+ once upon first connect.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_begin(self, dbapi_connection):
+ """Provide an implementation of ``connection.begin()``, given a
+ DB-API connection.
+
+ The DBAPI has no dedicated "begin" method and it is expected
+ that transactions are implicit. This hook is provided for those
+ DBAPIs that might need additional help in this area.
+
+ Note that :meth:`.Dialect.do_begin` is not called unless a
+ :class:`.Transaction` object is in use. The
+ :meth:`.Dialect.do_autocommit`
+ hook is provided for DBAPIs that need some extra commands emitted
+ after a commit in order to enter the next transaction, when the
+ SQLAlchemy :class:`_engine.Connection`
+ is used in its default "autocommit"
+ mode.
+
+ :param dbapi_connection: a DBAPI connection, typically
+ proxied within a :class:`.ConnectionFairy`.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_rollback(self, dbapi_connection):
+ """Provide an implementation of ``connection.rollback()``, given
+ a DB-API connection.
+
+ :param dbapi_connection: a DBAPI connection, typically
+ proxied within a :class:`.ConnectionFairy`.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_commit(self, dbapi_connection):
+ """Provide an implementation of ``connection.commit()``, given a
+ DB-API connection.
+
+ :param dbapi_connection: a DBAPI connection, typically
+ proxied within a :class:`.ConnectionFairy`.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_close(self, dbapi_connection):
+ """Provide an implementation of ``connection.close()``, given a DBAPI
+ connection.
+
+ This hook is called by the :class:`_pool.Pool`
+ when a connection has been
+ detached from the pool, or is being returned beyond the normal
+ capacity of the pool.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_set_input_sizes(self, cursor, list_of_tuples, context):
+ """invoke the cursor.setinputsizes() method with appropriate arguments
+
+ This hook is called if the dialect.use_inputsizes flag is set to True.
+ Parameter data is passed in a list of tuples (paramname, dbtype,
+ sqltype), where ``paramname`` is the key of the parameter in the
+ statement, ``dbtype`` is the DBAPI datatype and ``sqltype`` is the
+ SQLAlchemy type. The order of tuples is in the correct parameter order.
+
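+        A hypothetical implementation for a DBAPI whose
+        ``setinputsizes()`` accepts keyword arguments, in the style of
+        cx_Oracle::
+
+            def do_set_input_sizes(self, cursor, list_of_tuples, context):
+                cursor.setinputsizes(
+                    **{key: dbtype for key, dbtype, sqltype in list_of_tuples}
+                )
+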
+ .. versionadded:: 1.4
+
+
+ """
+ raise NotImplementedError()
+
+ def create_xid(self):
+ """Create a two-phase transaction ID.
+
+ This id will be passed to do_begin_twophase(),
+ do_rollback_twophase(), do_commit_twophase(). Its format is
+ unspecified.
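+
+        The default implementation generates a random hexadecimal
+        string, roughly::
+
+            def create_xid(self):
+                return "_sa_%032x" % random.randint(0, 2 ** 128)
+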
+ """
+
+ raise NotImplementedError()
+
+ def do_savepoint(self, connection, name):
+ """Create a savepoint with the given name.
+
+ :param connection: a :class:`_engine.Connection`.
+ :param name: savepoint name.
+
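+        The default implementation emits a SAVEPOINT statement using the
+        ``expression.SavepointClause`` construct, roughly::
+
+            def do_savepoint(self, connection, name):
+                connection.execute(expression.SavepointClause(name))
+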
+ """
+
+ raise NotImplementedError()
+
+ def do_rollback_to_savepoint(self, connection, name):
+ """Rollback a connection to the named savepoint.
+
+ :param connection: a :class:`_engine.Connection`.
+ :param name: savepoint name.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_release_savepoint(self, connection, name):
+ """Release the named savepoint on a connection.
+
+ :param connection: a :class:`_engine.Connection`.
+ :param name: savepoint name.
+ """
+
+ raise NotImplementedError()
+
+ def do_begin_twophase(self, connection, xid):
+ """Begin a two phase transaction on the given connection.
+
+ :param connection: a :class:`_engine.Connection`.
+ :param xid: xid
+
+ """
+
+ raise NotImplementedError()
+
+ def do_prepare_twophase(self, connection, xid):
+ """Prepare a two phase transaction on the given connection.
+
+ :param connection: a :class:`_engine.Connection`.
+ :param xid: xid
+
+ """
+
+ raise NotImplementedError()
+
+ def do_rollback_twophase(
+ self, connection, xid, is_prepared=True, recover=False
+ ):
+ """Rollback a two phase transaction on the given connection.
+
+ :param connection: a :class:`_engine.Connection`.
+ :param xid: xid
+ :param is_prepared: whether or not
+ :meth:`.TwoPhaseTransaction.prepare` was called.
+ :param recover: if the recover flag was passed.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_commit_twophase(
+ self, connection, xid, is_prepared=True, recover=False
+ ):
+ """Commit a two phase transaction on the given connection.
+
+
+ :param connection: a :class:`_engine.Connection`.
+ :param xid: xid
+ :param is_prepared: whether or not
+ :meth:`.TwoPhaseTransaction.prepare` was called.
+ :param recover: if the recover flag was passed.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_recover_twophase(self, connection):
+ """Recover list of uncommitted prepared two phase transaction
+ identifiers on the given connection.
+
+ :param connection: a :class:`_engine.Connection`.
+
+ """
+
+ raise NotImplementedError()
+
+ def do_executemany(self, cursor, statement, parameters, context=None):
+ """Provide an implementation of ``cursor.executemany(statement,
+ parameters)``."""
+
+ raise NotImplementedError()
+
+ def do_execute(self, cursor, statement, parameters, context=None):
+ """Provide an implementation of ``cursor.execute(statement,
+ parameters)``."""
+
+ raise NotImplementedError()
+
+ def do_execute_no_params(
+ self, cursor, statement, parameters, context=None
+ ):
+ """Provide an implementation of ``cursor.execute(statement)``.
+
+ The parameter collection should not be sent.
+
+ """
+
+ raise NotImplementedError()
+
+ def is_disconnect(self, e, connection, cursor):
+ """Return True if the given DB-API error indicates an invalid
+ connection"""
+
+ raise NotImplementedError()
+
+ def connect(self, *cargs, **cparams):
+ r"""Establish a connection using this dialect's DBAPI.
+
+ The default implementation of this method is::
+
+ def connect(self, *cargs, **cparams):
+ return self.dbapi.connect(*cargs, **cparams)
+
+ The ``*cargs, **cparams`` parameters are generated directly
+ from this dialect's :meth:`.Dialect.create_connect_args` method.
+
+ This method may be used for dialects that need to perform programmatic
+ per-connection steps when a new connection is procured from the
+ DBAPI.
+
+
+ :param \*cargs: positional parameters returned from the
+ :meth:`.Dialect.create_connect_args` method
+
+ :param \*\*cparams: keyword parameters returned from the
+ :meth:`.Dialect.create_connect_args` method.
+
+ :return: a DBAPI connection, typically from the :pep:`249` module
+ level ``.connect()`` function.
+
+ .. seealso::
+
+ :meth:`.Dialect.create_connect_args`
+
+ :meth:`.Dialect.on_connect`
+
+ """
+
+ def on_connect_url(self, url):
+ """return a callable which sets up a newly created DBAPI connection.
+
+ This method is a new hook that supersedes the
+ :meth:`_engine.Dialect.on_connect` method when implemented by a
+ dialect. When not implemented by a dialect, it invokes the
+ :meth:`_engine.Dialect.on_connect` method directly to maintain
+        compatibility with existing dialects. No deprecation of
+        :meth:`_engine.Dialect.on_connect` is expected.
+
+ The callable should accept a single argument "conn" which is the
+ DBAPI connection itself. The inner callable has no
+ return value.
+
+ E.g.::
+
+ class MyDialect(default.DefaultDialect):
+ # ...
+
+ def on_connect_url(self, url):
+ def do_on_connect(connection):
+ connection.execute("SET SPECIAL FLAGS etc")
+
+ return do_on_connect
+
+ This is used to set dialect-wide per-connection options such as
+ isolation modes, Unicode modes, etc.
+
+ This method differs from :meth:`_engine.Dialect.on_connect` in that
+ it is passed the :class:`_engine.URL` object that's relevant to the
+        connect args. Normally the only way to get this from within the
+        :meth:`_engine.Dialect.on_connect` hook is to look on the
+        :class:`_engine.Engine` itself; however, this URL object may have
+        been replaced by plugins.
+
+ .. note::
+
+ The default implementation of
+ :meth:`_engine.Dialect.on_connect_url` is to invoke the
+ :meth:`_engine.Dialect.on_connect` method. Therefore if a dialect
+ implements this method, the :meth:`_engine.Dialect.on_connect`
+ method **will not be called** unless the overriding dialect calls
+ it directly from here.
+
+ .. versionadded:: 1.4.3 added :meth:`_engine.Dialect.on_connect_url`
+ which normally calls into :meth:`_engine.Dialect.on_connect`.
+
+ :param url: a :class:`_engine.URL` object representing the
+ :class:`_engine.URL` that was passed to the
+ :meth:`_engine.Dialect.create_connect_args` method.
+
+ :return: a callable that accepts a single DBAPI connection as an
+ argument, or None.
+
+ .. seealso::
+
+ :meth:`_engine.Dialect.on_connect`
+
+ """
+ return self.on_connect()
+
+ def on_connect(self):
+ """return a callable which sets up a newly created DBAPI connection.
+
+ The callable should accept a single argument "conn" which is the
+ DBAPI connection itself. The inner callable has no
+ return value.
+
+ E.g.::
+
+ class MyDialect(default.DefaultDialect):
+ # ...
+
+ def on_connect(self):
+ def do_on_connect(connection):
+ connection.execute("SET SPECIAL FLAGS etc")
+
+ return do_on_connect
+
+ This is used to set dialect-wide per-connection options such as
+ isolation modes, Unicode modes, etc.
+
+ The "do_on_connect" callable is invoked by using the
+ :meth:`_events.PoolEvents.connect` event
+ hook, then unwrapping the DBAPI connection and passing it into the
+ callable.
+
+ .. versionchanged:: 1.4 the on_connect hook is no longer called twice
+ for the first connection of a dialect. The on_connect hook is still
+ called before the :meth:`_engine.Dialect.initialize` method however.
+
+ .. versionchanged:: 1.4.3 the on_connect hook is invoked from a new
+ method on_connect_url that passes the URL that was used to create
+ the connect args. Dialects can implement on_connect_url instead
+ of on_connect if they need the URL object that was used for the
+ connection in order to get additional context.
+
+ If None is returned, no event listener is generated.
+
+ :return: a callable that accepts a single DBAPI connection as an
+ argument, or None.
+
+ .. seealso::
+
+ :meth:`.Dialect.connect` - allows the DBAPI ``connect()`` sequence
+ itself to be controlled.
+
+ :meth:`.Dialect.on_connect_url` - supersedes
+ :meth:`.Dialect.on_connect` to also receive the
+ :class:`_engine.URL` object in context.
+
+ """
+ return None
+
+ def reset_isolation_level(self, dbapi_conn):
+ """Given a DBAPI connection, revert its isolation to the default.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`_engine.Connection` and
+ :class:`_engine.Engine`
+ isolation level facilities; these APIs should be preferred for
+ most typical use cases.
+
+ .. seealso::
+
+ :meth:`_engine.Connection.get_isolation_level`
+ - view current level
+
+ :attr:`_engine.Connection.default_isolation_level`
+ - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`_engine.Connection` isolation level
+
+ :paramref:`_sa.create_engine.isolation_level` -
+ set per :class:`_engine.Engine` isolation level
+
+ """
+
+ raise NotImplementedError()
+
+ def set_isolation_level(self, dbapi_conn, level):
+ """Given a DBAPI connection, set its isolation level.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`_engine.Connection` and
+ :class:`_engine.Engine`
+ isolation level facilities; these APIs should be preferred for
+ most typical use cases.
+
+ .. seealso::
+
+ :meth:`_engine.Connection.get_isolation_level`
+ - view current level
+
+ :attr:`_engine.Connection.default_isolation_level`
+ - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`_engine.Connection` isolation level
+
+ :paramref:`_sa.create_engine.isolation_level` -
+ set per :class:`_engine.Engine` isolation level
+
+ """
+
+ raise NotImplementedError()
+
+ def get_isolation_level(self, dbapi_conn):
+ """Given a DBAPI connection, return its isolation level.
+
+ When working with a :class:`_engine.Connection` object,
+ the corresponding
+ DBAPI connection may be procured using the
+ :attr:`_engine.Connection.connection` accessor.
+
+ Note that this is a dialect-level method which is used as part
+ of the implementation of the :class:`_engine.Connection` and
+ :class:`_engine.Engine` isolation level facilities;
+ these APIs should be preferred for most typical use cases.
+
+
+ .. seealso::
+
+ :meth:`_engine.Connection.get_isolation_level`
+ - view current level
+
+ :attr:`_engine.Connection.default_isolation_level`
+ - view default level
+
+ :paramref:`.Connection.execution_options.isolation_level` -
+ set per :class:`_engine.Connection` isolation level
+
+ :paramref:`_sa.create_engine.isolation_level` -
+ set per :class:`_engine.Engine` isolation level
+
+
+ """
+
+ raise NotImplementedError()
+
+ def get_default_isolation_level(self, dbapi_conn):
+ """Given a DBAPI connection, return its isolation level, or
+ a default isolation level if one cannot be retrieved.
+
+ This method may only raise NotImplementedError and
+ **must not raise any other exception**, as it is used implicitly upon
+ first connect.
+
+ The method **must return a value** for a dialect that supports
+ isolation level settings, as this level is what will be reverted
+ towards when a per-connection isolation level change is made.
+
+ The method defaults to using the :meth:`.Dialect.get_isolation_level`
+ method unless overridden by a dialect.
+
+ .. versionadded:: 1.3.22
+
+ """
+ raise NotImplementedError()
+
+ @classmethod
+ def get_dialect_cls(cls, url):
+ """Given a URL, return the :class:`.Dialect` that will be used.
+
+ This is a hook that allows an external plugin to provide functionality
+ around an existing dialect, by allowing the plugin to be loaded
+ from the url based on an entrypoint, and then the plugin returns
+ the actual dialect to be used.
+
+ By default this just returns the cls.
+
+ .. versionadded:: 1.0.3
+
+ """
+ return cls
+
+ @classmethod
+ def load_provisioning(cls):
+ """set up the provision.py module for this dialect.
+
+ For dialects that include a provision.py module that sets up
+ provisioning followers, this method should initiate that process.
+
+ A typical implementation would be::
+
+ @classmethod
+ def load_provisioning(cls):
+ __import__("mydialect.provision")
+
+ The default method assumes a module named ``provision.py`` inside
+ the owning package of the current dialect, based on the ``__module__``
+ attribute::
+
+ @classmethod
+ def load_provisioning(cls):
+ package = ".".join(cls.__module__.split(".")[0:-1])
+ try:
+ __import__(package + ".provision")
+ except ImportError:
+ pass
+
+ .. versionadded:: 1.3.14
+
+ """
+
+ @classmethod
+ def engine_created(cls, engine):
+ """A convenience hook called before returning the final
+ :class:`_engine.Engine`.
+
+ If the dialect returned a different class from the
+ :meth:`.get_dialect_cls`
+ method, then the hook is called on both classes, first on
+ the dialect class returned by the :meth:`.get_dialect_cls` method and
+ then on the class on which the method was called.
+
+ The hook should be used by dialects and/or wrappers to apply special
+ events to the engine or its components. In particular, it allows
+ a dialect-wrapping class to apply dialect-level events.
+
+ .. versionadded:: 1.0.3
+
+ """
+
+ def get_driver_connection(self, connection):
+ """Returns the connection object as returned by the external driver
+ package.
+
+ For normal dialects that use a DBAPI compliant driver this call
+ will just return the ``connection`` passed as argument.
+ For dialects that instead adapt a non DBAPI compliant driver, like
+ when adapting an asyncio driver, this call will return the
+ connection-like object as returned by the driver.
+
+ .. versionadded:: 1.4.24
+
+ """
+ raise NotImplementedError()
+
+
+class CreateEnginePlugin(object):
+ """A set of hooks intended to augment the construction of an
+ :class:`_engine.Engine` object based on entrypoint names in a URL.
+
+ The purpose of :class:`_engine.CreateEnginePlugin` is to allow third-party
+ systems to apply engine, pool and dialect level event listeners without
+ the need for the target application to be modified; instead, the plugin
+ names can be added to the database URL. Target applications for
+ :class:`_engine.CreateEnginePlugin` include:
+
+ * connection and SQL performance tools, e.g. which use events to track
+ number of checkouts and/or time spent with statements
+
+ * connectivity plugins such as proxies
+
+ A rudimentary :class:`_engine.CreateEnginePlugin` that attaches a logger
+ to an :class:`_engine.Engine` object might look like::
+
+
+ import logging
+
+ from sqlalchemy.engine import CreateEnginePlugin
+ from sqlalchemy import event
+
+ class LogCursorEventsPlugin(CreateEnginePlugin):
+ def __init__(self, url, kwargs):
+ # consume the parameter "log_cursor_logging_name" from the
+ # URL query
+ logging_name = url.query.get("log_cursor_logging_name", "log_cursor")
+
+ self.log = logging.getLogger(logging_name)
+
+ def update_url(self, url):
+ "update the URL to one that no longer includes our parameters"
+ return url.difference_update_query(["log_cursor_logging_name"])
+
+ def engine_created(self, engine):
+ "attach an event listener after the new Engine is constructed"
+ event.listen(engine, "before_cursor_execute", self._log_event)
+
+
+ def _log_event(
+ self,
+ conn,
+ cursor,
+ statement,
+ parameters,
+ context,
+ executemany):
+
+ self.log.info("Plugin logged cursor event: %s", statement)
+
+
+
+    Plugins are registered using entry points, in the same way as
+    dialects::
+
+ entry_points={
+ 'sqlalchemy.plugins': [
+ 'log_cursor_plugin = myapp.plugins:LogCursorEventsPlugin'
+            ]
+        }
+
+ A plugin that uses the above names would be invoked from a database
+ URL as in::
+
+ from sqlalchemy import create_engine
+
+ engine = create_engine(
+ "mysql+pymysql://scott:tiger@localhost/test?"
+ "plugin=log_cursor_plugin&log_cursor_logging_name=mylogger"
+ )
+
+ The ``plugin`` URL parameter supports multiple instances, so that a URL
+ may specify multiple plugins; they are loaded in the order stated
+ in the URL::
+
+ engine = create_engine(
+ "mysql+pymysql://scott:tiger@localhost/test?"
+ "plugin=plugin_one&plugin=plugin_twp&plugin=plugin_three")
+
+ The plugin names may also be passed directly to :func:`_sa.create_engine`
+ using the :paramref:`_sa.create_engine.plugins` argument::
+
+ engine = create_engine(
+ "mysql+pymysql://scott:tiger@localhost/test",
+ plugins=["myplugin"])
+
+ .. versionadded:: 1.2.3 plugin names can also be specified
+ to :func:`_sa.create_engine` as a list
+
+ A plugin may consume plugin-specific arguments from the
+ :class:`_engine.URL` object as well as the ``kwargs`` dictionary, which is
+ the dictionary of arguments passed to the :func:`_sa.create_engine`
+ call. "Consuming" these arguments includes that they must be removed
+ when the plugin initializes, so that the arguments are not passed along
+ to the :class:`_engine.Dialect` constructor, where they will raise an
+ :class:`_exc.ArgumentError` because they are not known by the dialect.
+
+ As of version 1.4 of SQLAlchemy, arguments should continue to be consumed
+ from the ``kwargs`` dictionary directly, by removing the values with a
+ method such as ``dict.pop``. Arguments from the :class:`_engine.URL` object
+ should be consumed by implementing the
+ :meth:`_engine.CreateEnginePlugin.update_url` method, returning a new copy
+ of the :class:`_engine.URL` with plugin-specific parameters removed::
+
+ class MyPlugin(CreateEnginePlugin):
+ def __init__(self, url, kwargs):
+ self.my_argument_one = url.query['my_argument_one']
+ self.my_argument_two = url.query['my_argument_two']
+ self.my_argument_three = kwargs.pop('my_argument_three', None)
+
+ def update_url(self, url):
+ return url.difference_update_query(
+ ["my_argument_one", "my_argument_two"]
+ )
+
+ Arguments like those illustrated above would be consumed from a
+ :func:`_sa.create_engine` call such as::
+
+ from sqlalchemy import create_engine
+
+ engine = create_engine(
+ "mysql+pymysql://scott:tiger@localhost/test?"
+ "plugin=myplugin&my_argument_one=foo&my_argument_two=bar",
+ my_argument_three='bat'
+ )
+
+ .. versionchanged:: 1.4
+
+ The :class:`_engine.URL` object is now immutable; a
+ :class:`_engine.CreateEnginePlugin` that needs to alter the
+ :class:`_engine.URL` should implement the newly added
+ :meth:`_engine.CreateEnginePlugin.update_url` method, which
+ is invoked after the plugin is constructed.
+
+ For migration, construct the plugin in the following way, checking
+ for the existence of the :meth:`_engine.CreateEnginePlugin.update_url`
+ method to detect which version is running::
+
+ class MyPlugin(CreateEnginePlugin):
+ def __init__(self, url, kwargs):
+ if hasattr(CreateEnginePlugin, "update_url"):
+ # detect the 1.4 API
+ self.my_argument_one = url.query['my_argument_one']
+ self.my_argument_two = url.query['my_argument_two']
+ else:
+ # detect the 1.3 and earlier API - mutate the
+ # URL directly
+ self.my_argument_one = url.query.pop('my_argument_one')
+ self.my_argument_two = url.query.pop('my_argument_two')
+
+ self.my_argument_three = kwargs.pop('my_argument_three', None)
+
+ def update_url(self, url):
+ # this method is only called in the 1.4 version
+ return url.difference_update_query(
+ ["my_argument_one", "my_argument_two"]
+ )
+
+ .. seealso::
+
+ :ref:`change_5526` - overview of the :class:`_engine.URL` change which
+ also includes notes regarding :class:`_engine.CreateEnginePlugin`.
+
+
+ When the engine creation process completes and produces the
+ :class:`_engine.Engine` object, it is again passed to the plugin via the
+ :meth:`_engine.CreateEnginePlugin.engine_created` hook. In this hook, additional
+ changes can be made to the engine, most typically involving setup of
+ events (e.g. those defined in :ref:`core_event_toplevel`).
+
+ .. versionadded:: 1.1
+
+ """ # noqa: E501
+
+ def __init__(self, url, kwargs):
+ """Construct a new :class:`.CreateEnginePlugin`.
+
+ The plugin object is instantiated individually for each call
+        to :func:`_sa.create_engine`. A single :class:`_engine.Engine`
+        will be passed to the :meth:`.CreateEnginePlugin.engine_created`
+        method corresponding to this URL.
+
+ :param url: the :class:`_engine.URL` object. The plugin may inspect
+ the :class:`_engine.URL` for arguments. Arguments used by the
+ plugin should be removed, by returning an updated :class:`_engine.URL`
+ from the :meth:`_engine.CreateEnginePlugin.update_url` method.
+
+ .. versionchanged:: 1.4
+
+ The :class:`_engine.URL` object is now immutable, so a
+ :class:`_engine.CreateEnginePlugin` that needs to alter the
+ :class:`_engine.URL` object should implement the
+ :meth:`_engine.CreateEnginePlugin.update_url` method.
+
+ :param kwargs: The keyword arguments passed to
+ :func:`_sa.create_engine`.
+
+ """
+ self.url = url
+
+ def update_url(self, url):
+ """Update the :class:`_engine.URL`.
+
+ A new :class:`_engine.URL` should be returned. This method is
+ typically used to consume configuration arguments from the
+ :class:`_engine.URL` which must be removed, as they will not be
+ recognized by the dialect. The
+ :meth:`_engine.URL.difference_update_query` method is available
+ to remove these arguments. See the docstring at
+ :class:`_engine.CreateEnginePlugin` for an example.
+
+
+ .. versionadded:: 1.4
+
+ """
+
+ def handle_dialect_kwargs(self, dialect_cls, dialect_args):
+ """parse and modify dialect kwargs"""
+
+ def handle_pool_kwargs(self, pool_cls, pool_args):
+ """parse and modify pool kwargs"""
+
+ def engine_created(self, engine):
+ """Receive the :class:`_engine.Engine`
+ object when it is fully constructed.
+
+ The plugin may make additional changes to the engine, such as
+ registering engine or connection pool events.
+
+ """
+
+
+class ExecutionContext(object):
+ """A messenger object for a Dialect that corresponds to a single
+ execution.
+
+ ExecutionContext should have these data members:
+
+ connection
+ Connection object which can be freely used by default value
+ generators to execute SQL. This Connection should reference the
+ same underlying connection/transactional resources of
+ root_connection.
+
+ root_connection
+ Connection object which is the source of this ExecutionContext. This
+ Connection may have close_with_result=True set, in which case it can
+ only be used once.
+
+ dialect
+ dialect which created this ExecutionContext.
+
+ cursor
+      DB-API cursor procured from the connection.
+
+    compiled
+      if passed to the constructor, the sqlalchemy.engine.base.Compiled
+      object being executed.
+
+ statement
+ string version of the statement to be executed. Is either
+ passed to the constructor, or must be created from the
+ sql.Compiled object by the time pre_exec() has completed.
+
+ parameters
+ bind parameters passed to the execute() method. For compiled
+ statements, this is a dictionary or list of dictionaries. For
+ textual statements, it should be in a format suitable for the
+ dialect's paramstyle (i.e. dict or list of dicts for non
+ positional, list or list of lists/tuples for positional).
+
+ isinsert
+ True if the statement is an INSERT.
+
+ isupdate
+ True if the statement is an UPDATE.
+
+ should_autocommit
+ True if the statement is a "committable" statement.
+
+ prefetch_cols
+ a list of Column objects for which a client-side default
+ was fired off. Applies to inserts and updates.
+
+ postfetch_cols
+ a list of Column objects for which a server-side default or
+ inline SQL expression value was fired off. Applies to inserts
+ and updates.
+ """
+
+ def create_cursor(self):
+ """Return a new cursor generated from this ExecutionContext's
+ connection.
+
+ Some dialects may wish to change the behavior of
+ connection.cursor(), such as postgresql which may return a PG
+ "server side" cursor.
+ """
+
+ raise NotImplementedError()
+
+ def pre_exec(self):
+ """Called before an execution of a compiled statement.
+
+ If a compiled statement was passed to this ExecutionContext,
+ the `statement` and `parameters` datamembers must be
+        initialized by the time this method completes.
+ """
+
+ raise NotImplementedError()
+
+ def get_out_parameter_values(self, out_param_names):
+ """Return a sequence of OUT parameter values from a cursor.
+
+ For dialects that support OUT parameters, this method will be called
+ when there is a :class:`.SQLCompiler` object which has the
+ :attr:`.SQLCompiler.has_out_parameters` flag set. This flag in turn
+ will be set to True if the statement itself has :class:`.BindParameter`
+ objects that have the ``.isoutparam`` flag set which are consumed by
+ the :meth:`.SQLCompiler.visit_bindparam` method. If the dialect
+ compiler produces :class:`.BindParameter` objects with ``.isoutparam``
+ set which are not handled by :meth:`.SQLCompiler.visit_bindparam`, it
+ should set this flag explicitly.
+
+ The list of names that were rendered for each bound parameter
+ is passed to the method. The method should then return a sequence of
+ values corresponding to the list of parameter objects. Unlike in
+ previous SQLAlchemy versions, the values can be the **raw values** from
+ the DBAPI; the execution context will apply the appropriate type
+ handler based on what's present in self.compiled.binds and update the
+ values. The processed dictionary will then be made available via the
+ ``.out_parameters`` collection on the result object. Note that
+ SQLAlchemy 1.4 has multiple kinds of result object as part of the 2.0
+ transition.
+
+ .. versionadded:: 1.4 - added
+ :meth:`.ExecutionContext.get_out_parameter_values`, which is invoked
+ automatically by the :class:`.DefaultExecutionContext` when there
+ are :class:`.BindParameter` objects with the ``.isoutparam`` flag
+ set. This replaces the practice of setting out parameters within
+ the now-removed ``get_result_proxy()`` method.
+
+ """
+ raise NotImplementedError()
+
+ def post_exec(self):
+ """Called after the execution of a compiled statement.
+
+ If a compiled statement was passed to this ExecutionContext,
+ the `last_insert_ids`, `last_inserted_params`, etc.
+ datamembers should be available after this method completes.
+ """
+
+ raise NotImplementedError()
+
+ def handle_dbapi_exception(self, e):
+ """Receive a DBAPI exception which occurred upon execute, result
+ fetch, etc."""
+
+ raise NotImplementedError()
+
+ def should_autocommit_text(self, statement):
+ """Parse the given textual statement and return True if it refers to
+ a "committable" statement"""
+
+ raise NotImplementedError()
+
+ def lastrow_has_defaults(self):
+ """Return True if the last INSERT or UPDATE row contained
+ inlined or database-side defaults.
+ """
+
+ raise NotImplementedError()
+
+ def get_rowcount(self):
+ """Return the DBAPI ``cursor.rowcount`` value, or in some
+ cases an interpreted value.
+
+ See :attr:`_engine.CursorResult.rowcount` for details on this.
+
+ """
+
+ raise NotImplementedError()
+
+
+@util.deprecated_20_cls(
+ ":class:`.Connectable`",
+ alternative=(
+ "The :class:`_engine.Engine` will be the only Core "
+ "object that features a .connect() method, and the "
+ ":class:`_engine.Connection` will be the only object that features "
+ "an .execute() method."
+ ),
+ constructor=None,
+)
+class Connectable(object):
+ """Interface for an object which supports execution of SQL constructs.
+
+ The two implementations of :class:`.Connectable` are
+ :class:`_engine.Connection` and :class:`_engine.Engine`.
+
+ Connectable must also implement the 'dialect' member which references a
+ :class:`.Dialect` instance.
+
+ """
+
+ def connect(self, **kwargs):
+ """Return a :class:`_engine.Connection` object.
+
+ Depending on context, this may be ``self`` if this object
+ is already an instance of :class:`_engine.Connection`, or a newly
+ procured :class:`_engine.Connection` if this object is an instance
+ of :class:`_engine.Engine`.
+
+ """
+
+ engine = None
+ """The :class:`_engine.Engine` instance referred to by this
+ :class:`.Connectable`.
+
+ May be ``self`` if this is already an :class:`_engine.Engine`.
+
+ """
+
+ def execute(self, object_, *multiparams, **params):
+ """Executes the given construct and returns a
+ :class:`_engine.CursorResult`.
+ """
+ raise NotImplementedError()
+
+ def scalar(self, object_, *multiparams, **params):
+ """Executes and returns the first column of the first row.
+
+ The underlying cursor is closed after execution.
+ """
+ raise NotImplementedError()
+
+ def _run_visitor(self, visitorcallable, element, **kwargs):
+ raise NotImplementedError()
+
+ def _execute_clauseelement(self, elem, multiparams=None, params=None):
+ raise NotImplementedError()
+
+
+class ExceptionContext(object):
+ """Encapsulate information about an error condition in progress.
+
+ This object exists solely to be passed to the
+ :meth:`_events.ConnectionEvents.handle_error` event,
+ supporting an interface that
+ can be extended without backwards-incompatibility.
+
+ .. versionadded:: 0.9.7
+
+ """
+
+ connection = None
+ """The :class:`_engine.Connection` in use during the exception.
+
+ This member is present, except in the case of a failure when
+ first connecting.
+
+ .. seealso::
+
+ :attr:`.ExceptionContext.engine`
+
+
+ """
+
+ engine = None
+ """The :class:`_engine.Engine` in use during the exception.
+
+ This member should always be present, even in the case of a failure
+ when first connecting.
+
+ .. versionadded:: 1.0.0
+
+ """
+
+ cursor = None
+ """The DBAPI cursor object.
+
+ May be None.
+
+ """
+
+ statement = None
+ """String SQL statement that was emitted directly to the DBAPI.
+
+ May be None.
+
+ """
+
+ parameters = None
+ """Parameter collection that was emitted directly to the DBAPI.
+
+ May be None.
+
+ """
+
+ original_exception = None
+ """The exception object which was caught.
+
+ This member is always present.
+
+ """
+
+ sqlalchemy_exception = None
+ """The :class:`sqlalchemy.exc.StatementError` which wraps the original,
+ and will be raised if exception handling is not circumvented by the event.
+
+ May be None, as not all exception types are wrapped by SQLAlchemy.
+ For DBAPI-level exceptions that subclass the dbapi's Error class, this
+ field will always be present.
+
+ """
+
+ chained_exception = None
+ """The exception that was returned by the previous handler in the
+ exception chain, if any.
+
+ If present, this exception will be the one ultimately raised by
+ SQLAlchemy unless a subsequent handler replaces it.
+
+ May be None.
+
+ """
+
+ execution_context = None
+ """The :class:`.ExecutionContext` corresponding to the execution
+ operation in progress.
+
+ This is present for statement execution operations, but not for
+ operations such as transaction begin/end. It also is not present when
+ the exception was raised before the :class:`.ExecutionContext`
+ could be constructed.
+
+ Note that the :attr:`.ExceptionContext.statement` and
+ :attr:`.ExceptionContext.parameters` members may represent a
+ different value than that of the :class:`.ExecutionContext`,
+ potentially in the case where a
+ :meth:`_events.ConnectionEvents.before_cursor_execute` event or similar
+ modified the statement/parameters to be sent.
+
+ May be None.
+
+ """
+
+ is_disconnect = None
+ """Represent whether the exception as occurred represents a "disconnect"
+ condition.
+
+ This flag will always be True or False within the scope of the
+ :meth:`_events.ConnectionEvents.handle_error` handler.
+
+ SQLAlchemy will defer to this flag in order to determine whether or not
+ the connection should be invalidated subsequently. That is, by
+ assigning to this flag, a "disconnect" event which then results in
+ a connection and pool invalidation can be invoked or prevented by
+ changing this flag.
+
+
+ .. note:: The pool "pre_ping" handler enabled using the
+ :paramref:`_sa.create_engine.pool_pre_ping` parameter does **not**
+ consult this event before deciding if the "ping" returned false,
+ as opposed to receiving an unhandled error. For this use case, the
+ :ref:`legacy recipe based on engine_connect() may be used
+ <pool_disconnects_pessimistic_custom>`. A future API allow more
+ comprehensive customization of the "disconnect" detection mechanism
+ across all functions.
+
+ """
+
+ invalidate_pool_on_disconnect = True
+ """Represent whether all connections in the pool should be invalidated
+ when a "disconnect" condition is in effect.
+
+ Setting this flag to False within the scope of the
+ :meth:`_events.ConnectionEvents.handle_error`
+ event will have the effect such
+ that the full collection of connections in the pool will not be
+ invalidated during a disconnect; only the current connection that is the
+ subject of the error will actually be invalidated.
+
+ The purpose of this flag is for custom disconnect-handling schemes where
+ the invalidation of other connections in the pool is to be performed
+ based on other conditions, or even on a per-connection basis.
+
+ .. versionadded:: 1.0.3
+
+ """
+
+
+class AdaptedConnection(object):
+ """Interface of an adapted connection object to support the DBAPI protocol.
+
+ Used by asyncio dialects to provide a sync-style pep-249 facade on top
+ of the asyncio connection/cursor API provided by the driver.
+
+ .. versionadded:: 1.4.24
+
+ """
+
+ __slots__ = ("_connection",)
+
+ @property
+ def driver_connection(self):
+ """The connection object as returned by the driver after a connect."""
+ return self._connection
+
+ def run_async(self, fn):
+ """Run the awaitable returned by the given function, which is passed
+ the raw asyncio driver connection.
+
+ This is used to invoke awaitable-only methods on the driver connection
+ within the context of a "synchronous" method, like a connection
+ pool event handler.
+
+ E.g.::
+
+ engine = create_async_engine(...)
+
+ @event.listens_for(engine.sync_engine, "connect")
+ def register_custom_types(dbapi_connection, ...):
+ dbapi_connection.run_async(
+ lambda connection: connection.set_type_codec(
+ 'MyCustomType', encoder, decoder, ...
+ )
+ )
+
+ .. versionadded:: 1.4.30
+
+ .. seealso::
+
+ :ref:`asyncio_events_run_async`
+
+ """
+ return await_only(fn(self._connection))
+
+ def __repr__(self):
+ return "<AdaptedConnection %s>" % self._connection
diff --git a/lib/sqlalchemy/engine/mock.py b/lib/sqlalchemy/engine/mock.py
new file mode 100644
index 0000000..6fcb09f
--- /dev/null
+++ b/lib/sqlalchemy/engine/mock.py
@@ -0,0 +1,118 @@
+# engine/mock.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from operator import attrgetter
+
+from . import base
+from . import url as _url
+from .. import util
+from ..sql import ddl
+
+
+class MockConnection(base.Connectable):
+ def __init__(self, dialect, execute):
+ self._dialect = dialect
+ self.execute = execute
+
+ engine = property(lambda s: s)
+ dialect = property(attrgetter("_dialect"))
+ name = property(lambda s: s._dialect.name)
+
+ def schema_for_object(self, obj):
+ return obj.schema
+
+ def connect(self, **kwargs):
+ return self
+
+ def execution_options(self, **kw):
+ return self
+
+ def compiler(self, statement, parameters, **kwargs):
+ return self._dialect.compiler(
+ statement, parameters, engine=self, **kwargs
+ )
+
+ def create(self, entity, **kwargs):
+ kwargs["checkfirst"] = False
+
+ ddl.SchemaGenerator(self.dialect, self, **kwargs).traverse_single(
+ entity
+ )
+
+ def drop(self, entity, **kwargs):
+ kwargs["checkfirst"] = False
+
+ ddl.SchemaDropper(self.dialect, self, **kwargs).traverse_single(entity)
+
+ def _run_ddl_visitor(
+ self, visitorcallable, element, connection=None, **kwargs
+ ):
+ kwargs["checkfirst"] = False
+ visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
+
+ def execute(self, object_, *multiparams, **params):
+ raise NotImplementedError()
+
+
+def create_mock_engine(url, executor, **kw):
+ """Create a "mock" engine used for echoing DDL.
+
+ This is a utility function used for debugging or storing the output of DDL
+ sequences as generated by :meth:`_schema.MetaData.create_all`
+ and related methods.
+
+ The function accepts a URL which is used only to determine the kind of
+ dialect to be used, as well as an "executor" callable function which
+ will receive a SQL expression object and parameters, which can then be
+ echoed or otherwise printed. The executor's return value is not handled,
+ nor does the engine allow regular string statements to be invoked, and
+ is therefore only useful for DDL that is sent to the database without
+ receiving any results.
+
+ E.g.::
+
+ from sqlalchemy import create_mock_engine
+
+ def dump(sql, *multiparams, **params):
+ print(sql.compile(dialect=engine.dialect))
+
+ engine = create_mock_engine('postgresql://', dump)
+ metadata.create_all(engine, checkfirst=False)
+
+ :param url: A string URL which typically needs to contain only the
+ database backend name.
+
+ :param executor: a callable which receives the arguments ``sql``,
+ ``*multiparams`` and ``**params``. The ``sql`` parameter is typically
+ an instance of :class:`.DDLElement`, which can then be compiled into a
+ string using :meth:`.DDLElement.compile`.
+
+ .. versionadded:: 1.4 - the :func:`.create_mock_engine` function replaces
+ the previous "mock" engine strategy used with
+ :func:`_sa.create_engine`.
+
+ .. seealso::
+
+ :ref:`faq_ddl_as_string`
+
+ """
+
+ # create url.URL object
+ u = _url.make_url(url)
+
+ dialect_cls = u.get_dialect()
+
+ dialect_args = {}
+ # consume dialect arguments from kwargs
+ for k in util.get_cls_kwargs(dialect_cls):
+ if k in kw:
+ dialect_args[k] = kw.pop(k)
+
+ # create dialect
+ dialect = dialect_cls(**dialect_args)
+
+ return MockConnection(dialect, executor)
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
new file mode 100644
index 0000000..b475228
--- /dev/null
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -0,0 +1,1160 @@
+# engine/reflection.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Provides an abstraction for obtaining database schema information.
+
+Usage Notes:
+
+Here are some general conventions when accessing the low level inspector
+methods such as get_table_names, get_columns, etc.
+
+1. Inspector methods return lists of dicts in most cases for the following
+ reasons:
+
+ * They're both standard types that can be serialized.
+ * Using a dict instead of a tuple allows easy expansion of attributes.
+ * Using a list for the outer structure maintains order and is easy to work
+ with (e.g. list comprehension [d['name'] for d in cols]).
+
+2. Records that contain a name, such as the column name in a column record,
+   use the key 'name'. So for most return values, each record will have a
+   'name' attribute.
+"""
+
+import contextlib
+
+from .base import Connectable
+from .base import Connection
+from .base import Engine
+from .. import exc
+from .. import inspection
+from .. import sql
+from .. import util
+from ..sql import operators
+from ..sql import schema as sa_schema
+from ..sql.type_api import TypeEngine
+from ..util import topological
+
+
+@util.decorator
+def cache(fn, self, con, *args, **kw):
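+    # memoize reflection results in the caller-supplied "info_cache"
+    # dictionary, keyed on the method name, string positional arguments,
+    # and keyword arguments other than "info_cache" itself; call through
+    # uncached when no info_cache dictionary was given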
+ info_cache = kw.get("info_cache", None)
+ if info_cache is None:
+ return fn(self, con, *args, **kw)
+ key = (
+ fn.__name__,
+ tuple(a for a in args if isinstance(a, util.string_types)),
+ tuple((k, v) for k, v in kw.items() if k != "info_cache"),
+ )
+ ret = info_cache.get(key)
+ if ret is None:
+ ret = fn(self, con, *args, **kw)
+ info_cache[key] = ret
+ return ret
+
+
+@inspection._self_inspects
+class Inspector(object):
+ """Performs database schema inspection.
+
+ The Inspector acts as a proxy to the reflection methods of the
+ :class:`~sqlalchemy.engine.interfaces.Dialect`, providing a
+ consistent interface as well as caching support for previously
+ fetched metadata.
+
+ A :class:`_reflection.Inspector` object is usually created via the
+ :func:`_sa.inspect` function, which may be passed an
+ :class:`_engine.Engine`
+ or a :class:`_engine.Connection`::
+
+ from sqlalchemy import inspect, create_engine
+ engine = create_engine('...')
+ insp = inspect(engine)
+
+ Where above, the :class:`~sqlalchemy.engine.interfaces.Dialect` associated
+ with the engine may opt to return an :class:`_reflection.Inspector`
+ subclass that
+ provides additional methods specific to the dialect's target database.
+
+ """
+
+ @util.deprecated(
+ "1.4",
+ "The __init__() method on :class:`_reflection.Inspector` "
+ "is deprecated and "
+ "will be removed in a future release. Please use the "
+ ":func:`.sqlalchemy.inspect` "
+ "function on an :class:`_engine.Engine` or "
+ ":class:`_engine.Connection` "
+ "in order to "
+ "acquire an :class:`_reflection.Inspector`.",
+ )
+ def __init__(self, bind):
+ """Initialize a new :class:`_reflection.Inspector`.
+
+ :param bind: a :class:`~sqlalchemy.engine.Connectable`,
+ which is typically an instance of
+ :class:`~sqlalchemy.engine.Engine` or
+ :class:`~sqlalchemy.engine.Connection`.
+
+ For a dialect-specific instance of :class:`_reflection.Inspector`, see
+ :meth:`_reflection.Inspector.from_engine`
+
+ """
+ return self._init_legacy(bind)
+
+ @classmethod
+ def _construct(cls, init, bind):
+
+ if hasattr(bind.dialect, "inspector"):
+ cls = bind.dialect.inspector
+
+ self = cls.__new__(cls)
+ init(self, bind)
+ return self
+
+ def _init_legacy(self, bind):
+ if hasattr(bind, "exec_driver_sql"):
+ self._init_connection(bind)
+ else:
+ self._init_engine(bind)
+
+ def _init_engine(self, engine):
+ self.bind = self.engine = engine
+ engine.connect().close()
+ self._op_context_requires_connect = True
+ self.dialect = self.engine.dialect
+ self.info_cache = {}
+
+ def _init_connection(self, connection):
+ self.bind = connection
+ self.engine = connection.engine
+ self._op_context_requires_connect = False
+ self.dialect = self.engine.dialect
+ self.info_cache = {}
+
+ @classmethod
+ @util.deprecated(
+ "1.4",
+ "The from_engine() method on :class:`_reflection.Inspector` "
+ "is deprecated and "
+ "will be removed in a future release. Please use the "
+ ":func:`.sqlalchemy.inspect` "
+ "function on an :class:`_engine.Engine` or "
+ ":class:`_engine.Connection` "
+ "in order to "
+ "acquire an :class:`_reflection.Inspector`.",
+ )
+ def from_engine(cls, bind):
+ """Construct a new dialect-specific Inspector object from the given
+ engine or connection.
+
+ :param bind: a :class:`~sqlalchemy.engine.Connectable`,
+ which is typically an instance of
+ :class:`~sqlalchemy.engine.Engine` or
+ :class:`~sqlalchemy.engine.Connection`.
+
+        This method differs from a direct constructor call of
+ :class:`_reflection.Inspector` in that the
+ :class:`~sqlalchemy.engine.interfaces.Dialect` is given a chance to
+ provide a dialect-specific :class:`_reflection.Inspector` instance,
+ which may
+ provide additional methods.
+
+ See the example at :class:`_reflection.Inspector`.
+
+ """
+ return cls._construct(cls._init_legacy, bind)
+
+ @inspection._inspects(Connectable)
+ def _connectable_insp(bind):
+ # this method should not be used unless some unusual case
+ # has subclassed "Connectable"
+
+ return Inspector._construct(Inspector._init_legacy, bind)
+
+ @inspection._inspects(Engine)
+ def _engine_insp(bind):
+ return Inspector._construct(Inspector._init_engine, bind)
+
+ @inspection._inspects(Connection)
+ def _connection_insp(bind):
+ return Inspector._construct(Inspector._init_connection, bind)
+
+ @contextlib.contextmanager
+ def _operation_context(self):
+ """Return a context that optimizes for multiple operations on a single
+ transaction.
+
+ This essentially allows connect()/close() to be called if we detected
+ that we're against an :class:`_engine.Engine` and not a
+ :class:`_engine.Connection`.
+
+ """
+ if self._op_context_requires_connect:
+ conn = self.bind.connect()
+ else:
+ conn = self.bind
+ try:
+ yield conn
+ finally:
+ if self._op_context_requires_connect:
+ conn.close()
+
+ @contextlib.contextmanager
+ def _inspection_context(self):
+ """Return an :class:`_reflection.Inspector`
+ from this one that will run all
+ operations on a single connection.
+
+ """
+
+ with self._operation_context() as conn:
+ sub_insp = self._construct(self.__class__._init_connection, conn)
+ sub_insp.info_cache = self.info_cache
+ yield sub_insp
+
+ @property
+ def default_schema_name(self):
+ """Return the default schema name presented by the dialect
+ for the current engine's database user.
+
+ E.g. this is typically ``public`` for PostgreSQL and ``dbo``
+ for SQL Server.
+
+ """
+ return self.dialect.default_schema_name
+
+ def get_schema_names(self):
+ """Return all schema names."""
+
+ if hasattr(self.dialect, "get_schema_names"):
+ with self._operation_context() as conn:
+ return self.dialect.get_schema_names(
+ conn, info_cache=self.info_cache
+ )
+ return []
+
+ def get_table_names(self, schema=None):
+ """Return all table names in referred to within a particular schema.
+
+ The names are expected to be real tables only, not views.
+ Views are instead returned using the
+ :meth:`_reflection.Inspector.get_view_names`
+ method.
+
+
+ :param schema: Schema name. If ``schema`` is left at ``None``, the
+ database's default schema is
+ used, else the named schema is searched. If the database does not
+ support named schemas, behavior is undefined if ``schema`` is not
+ passed as ``None``. For special quoting, use :class:`.quoted_name`.
+
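+        E.g., assuming ``engine`` refers to an existing
+        :class:`_engine.Engine`::
+
+            from sqlalchemy import inspect
+
+            insp = inspect(engine)
+            print(insp.get_table_names())
+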
+ .. seealso::
+
+ :meth:`_reflection.Inspector.get_sorted_table_and_fkc_names`
+
+ :attr:`_schema.MetaData.sorted_tables`
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_table_names(
+ conn, schema, info_cache=self.info_cache
+ )
+
+ def has_table(self, table_name, schema=None):
+ """Return True if the backend has a table of the given name.
+
+
+ :param table_name: name of the table to check
+ :param schema: schema name to query, if not the default schema.
+
+ .. versionadded:: 1.4 - the :meth:`.Inspector.has_table` method
+ replaces the :meth:`_engine.Engine.has_table` method.
+
+ """
+ # TODO: info_cache?
+ with self._operation_context() as conn:
+ return self.dialect.has_table(conn, table_name, schema)
+
+ def has_sequence(self, sequence_name, schema=None):
+ """Return True if the backend has a table of the given name.
+
+ :param sequence_name: name of the table to check
+ :param schema: schema name to query, if not the default schema.
+
+ .. versionadded:: 1.4
+
+ """
+ # TODO: info_cache?
+ with self._operation_context() as conn:
+ return self.dialect.has_sequence(conn, sequence_name, schema)
+
+ def get_sorted_table_and_fkc_names(self, schema=None):
+ """Return dependency-sorted table and foreign key constraint names in
+ referred to within a particular schema.
+
+ This will yield 2-tuples of
+ ``(tablename, [(tname, fkname), (tname, fkname), ...])``
+ consisting of table names in CREATE order grouped with the foreign key
+ constraint names that are not detected as belonging to a cycle.
+ The final element
+        will be ``(None, [(tname, fkname), (tname, fkname), ...])``
+ which will consist of remaining
+ foreign key constraint names that would require a separate CREATE
+ step after-the-fact, based on dependencies between tables.
+
+        .. versionadded:: 1.0.0
+
+ .. seealso::
+
+ :meth:`_reflection.Inspector.get_table_names`
+
+ :func:`.sort_tables_and_constraints` - similar method which works
+ with an already-given :class:`_schema.MetaData`.
+
+ """
+
+ with self._operation_context() as conn:
+ tnames = self.dialect.get_table_names(
+ conn, schema, info_cache=self.info_cache
+ )
+
+ tuples = set()
+ remaining_fkcs = set()
+
+ fknames_for_table = {}
+ for tname in tnames:
+ fkeys = self.get_foreign_keys(tname, schema)
+ fknames_for_table[tname] = set([fk["name"] for fk in fkeys])
+ for fkey in fkeys:
+ if tname != fkey["referred_table"]:
+ tuples.add((fkey["referred_table"], tname))
+ try:
+ candidate_sort = list(topological.sort(tuples, tnames))
+ except exc.CircularDependencyError as err:
+ for edge in err.edges:
+ tuples.remove(edge)
+ remaining_fkcs.update(
+ (edge[1], fkc) for fkc in fknames_for_table[edge[1]]
+ )
+
+ candidate_sort = list(topological.sort(tuples, tnames))
+ return [
+ (tname, fknames_for_table[tname].difference(remaining_fkcs))
+ for tname in candidate_sort
+ ] + [(None, list(remaining_fkcs))]
+
+ def get_temp_table_names(self):
+ """Return a list of temporary table names for the current bind.
+
+ This method is unsupported by most dialects; currently
+ only SQLite implements it.
+
+ .. versionadded:: 1.0.0
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_temp_table_names(
+ conn, info_cache=self.info_cache
+ )
+
+ def get_temp_view_names(self):
+ """Return a list of temporary view names for the current bind.
+
+ This method is unsupported by most dialects; currently
+ only SQLite implements it.
+
+ .. versionadded:: 1.0.0
+
+ """
+ with self._operation_context() as conn:
+ return self.dialect.get_temp_view_names(
+ conn, info_cache=self.info_cache
+ )
+
+ def get_table_options(self, table_name, schema=None, **kw):
+ """Return a dictionary of options specified when the table of the
+ given name was created.
+
+ This currently includes some options that apply to MySQL tables.
+
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ """
+ if hasattr(self.dialect, "get_table_options"):
+ with self._operation_context() as conn:
+ return self.dialect.get_table_options(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+ return {}
+
+ def get_view_names(self, schema=None):
+ """Return all view names in `schema`.
+
+ :param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_view_names(
+ conn, schema, info_cache=self.info_cache
+ )
+
+ def get_sequence_names(self, schema=None):
+ """Return all sequence names in `schema`.
+
+ :param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_sequence_names(
+ conn, schema, info_cache=self.info_cache
+ )
+
+ def get_view_definition(self, view_name, schema=None):
+ """Return definition for `view_name`.
+
+        :param schema: Optional, retrieve the definition from a non-default
+         schema. For special quoting, use :class:`.quoted_name`.
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_view_definition(
+ conn, view_name, schema, info_cache=self.info_cache
+ )
+
+ def get_columns(self, table_name, schema=None, **kw):
+ """Return information about columns in `table_name`.
+
+ Given a string `table_name` and an optional string `schema`, return
+ column information as a list of dicts with these keys:
+
+ * ``name`` - the column's name
+
+ * ``type`` - the type of this column; an instance of
+ :class:`~sqlalchemy.types.TypeEngine`
+
+ * ``nullable`` - boolean flag if the column is NULL or NOT NULL
+
+ * ``default`` - the column's server default value - this is returned
+ as a string SQL expression.
+
+ * ``autoincrement`` - indicates that the column is auto incremented -
+ this is returned as a boolean or 'auto'
+
+ * ``comment`` - (optional) the comment on the column. Only some
+ dialects return this key
+
+ * ``computed`` - (optional) when present it indicates that this column
+ is computed by the database. Only some dialects return this key.
+ Returned as a dict with the keys:
+
+ * ``sqltext`` - the expression used to generate this column returned
+ as a string SQL expression
+
+ * ``persisted`` - (optional) boolean that indicates if the column is
+ stored in the table
+
+ .. versionadded:: 1.3.16 - added support for computed reflection.
+
+ * ``identity`` - (optional) when present it indicates that this column
+ is a generated always column. Only some dialects return this key.
+ For a list of keywords on this dict see :class:`_schema.Identity`.
+
+ .. versionadded:: 1.4 - added support for identity column reflection.
+
+ * ``dialect_options`` - (optional) a dict with dialect specific options
+
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
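+        A sketch of one such dictionary, with illustrative values only::
+
+            {
+                "name": "id",
+                "type": INTEGER(),
+                "nullable": False,
+                "default": None,
+                "autoincrement": "auto",
+            }
+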
+ :return: list of dictionaries, each representing the definition of
+ a database column.
+
+ """
+
+ with self._operation_context() as conn:
+ col_defs = self.dialect.get_columns(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+ for col_def in col_defs:
+ # make this easy and only return instances for coltype
+ coltype = col_def["type"]
+ if not isinstance(coltype, TypeEngine):
+ col_def["type"] = coltype()
+ return col_defs
+
+ def get_pk_constraint(self, table_name, schema=None, **kw):
+ """Return information about primary key constraint on `table_name`.
+
+ Given a string `table_name`, and an optional string `schema`, return
+ primary key information as a dictionary with these keys:
+
+ * ``constrained_columns`` -
+ a list of column names that make up the primary key
+
+ * ``name`` -
+ optional name of the primary key constraint.
+
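+        e.g., an illustrative return value::
+
+            {"constrained_columns": ["id"], "name": "user_pkey"}
+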
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ """
+ with self._operation_context() as conn:
+ return self.dialect.get_pk_constraint(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+
+ def get_foreign_keys(self, table_name, schema=None, **kw):
+ """Return information about foreign_keys in `table_name`.
+
+ Given a string `table_name`, and an optional string `schema`, return
+ foreign key information as a list of dicts with these keys:
+
+ * ``constrained_columns`` -
+ a list of column names that make up the foreign key
+
+ * ``referred_schema`` -
+ the name of the referred schema
+
+ * ``referred_table`` -
+ the name of the referred table
+
+ * ``referred_columns`` -
+ a list of column names in the referred table that correspond to
+ constrained_columns
+
+ * ``name`` -
+ optional name of the foreign key constraint.
+
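+        e.g., an illustrative entry for a hypothetical ``address.user_id``
+        column referring to ``user.id``::
+
+            {
+                "constrained_columns": ["user_id"],
+                "referred_schema": None,
+                "referred_table": "user",
+                "referred_columns": ["id"],
+                "name": "fk_address_user_id",
+            }
+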
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_foreign_keys(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+
+ def get_indexes(self, table_name, schema=None, **kw):
+ """Return information about indexes in `table_name`.
+
+ Given a string `table_name` and an optional string `schema`, return
+ index information as a list of dicts with these keys:
+
+ * ``name`` -
+ the index's name
+
+ * ``column_names`` -
+ list of column names in order
+
+ * ``unique`` -
+ boolean
+
+ * ``column_sorting`` -
+ optional dict mapping column names to tuple of sort keywords,
+ which may include ``asc``, ``desc``, ``nulls_first``, ``nulls_last``.
+
+ .. versionadded:: 1.3.5
+
+ * ``dialect_options`` -
+ dict of dialect-specific index options. May not be present
+ for all dialects.
+
+ .. versionadded:: 1.0.0
+
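+        e.g., an illustrative entry for a hypothetical index::
+
+            {"name": "ix_user_email", "column_names": ["email"], "unique": True}
+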
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_indexes(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+
+ def get_unique_constraints(self, table_name, schema=None, **kw):
+ """Return information about unique constraints in `table_name`.
+
+ Given a string `table_name` and an optional string `schema`, return
+ unique constraint information as a list of dicts with these keys:
+
+ * ``name`` -
+ the unique constraint's name
+
+ * ``column_names`` -
+ list of column names in order
+
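+        e.g., an illustrative entry::
+
+            {"name": "uq_user_email", "column_names": ["email"]}
+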
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_unique_constraints(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+
+ def get_table_comment(self, table_name, schema=None, **kw):
+ """Return information about the table comment for ``table_name``.
+
+ Given a string ``table_name`` and an optional string ``schema``,
+ return table comment information as a dictionary with these keys:
+
+ * ``text`` -
+ text of the comment.
+
+ Raises ``NotImplementedError`` for a dialect that does not support
+ comments.
+
+ .. versionadded:: 1.2
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_table_comment(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+
+ def get_check_constraints(self, table_name, schema=None, **kw):
+ """Return information about check constraints in `table_name`.
+
+ Given a string `table_name` and an optional string `schema`, return
+ check constraint information as a list of dicts with these keys:
+
+ * ``name`` -
+ the check constraint's name
+
+ * ``sqltext`` -
+ the check constraint's SQL expression
+
+ * ``dialect_options`` -
+ may or may not be present; a dictionary with additional
+ dialect-specific options for this CHECK constraint
+
+ .. versionadded:: 1.3.8
+
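+        e.g., an illustrative entry::
+
+            {"name": "ck_user_age", "sqltext": "age > 0"}
+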
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ .. versionadded:: 1.1.0
+
+ """
+
+ with self._operation_context() as conn:
+ return self.dialect.get_check_constraints(
+ conn, table_name, schema, info_cache=self.info_cache, **kw
+ )
+
+ @util.deprecated_20(
+ ":meth:`_reflection.Inspector.reflecttable`",
+ "The :meth:`_reflection.Inspector.reflecttable` "
+ "method was renamed to "
+ ":meth:`_reflection.Inspector.reflect_table`. This deprecated alias "
+ "will be removed in a future release.",
+ )
+ def reflecttable(self, *args, **kwargs):
+ "See reflect_table. This method name is deprecated"
+ return self.reflect_table(*args, **kwargs)
+
+ def reflect_table(
+ self,
+ table,
+ include_columns,
+ exclude_columns=(),
+ resolve_fks=True,
+ _extend_on=None,
+ ):
+ """Given a :class:`_schema.Table` object, load its internal
+ constructs based on introspection.
+
+ This is the underlying method used by most dialects to produce
+ table reflection. Direct usage is like::
+
+ from sqlalchemy import create_engine, MetaData, Table
+ from sqlalchemy import inspect
+
+ engine = create_engine('...')
+ meta = MetaData()
+ user_table = Table('user', meta)
+ insp = inspect(engine)
+ insp.reflect_table(user_table, None)
+
+ .. versionchanged:: 1.4 Renamed from ``reflecttable`` to
+ ``reflect_table``
+
+ :param table: a :class:`~sqlalchemy.schema.Table` instance.
+ :param include_columns: a list of string column names to include
+ in the reflection process. If ``None``, all columns are reflected.
+
+ """
+
+ if _extend_on is not None:
+ if table in _extend_on:
+ return
+ else:
+ _extend_on.add(table)
+
+ dialect = self.bind.dialect
+
+ with self._operation_context() as conn:
+ schema = conn.schema_for_object(table)
+
+ table_name = table.name
+
+ # get table-level arguments that are specifically
+ # intended for reflection, e.g. oracle_resolve_synonyms.
+ # these are unconditionally passed to related Table
+ # objects
+ reflection_options = dict(
+ (k, table.dialect_kwargs.get(k))
+ for k in dialect.reflection_options
+ if k in table.dialect_kwargs
+ )
+
+ # reflect table options, like mysql_engine
+ tbl_opts = self.get_table_options(
+ table_name, schema, **table.dialect_kwargs
+ )
+ if tbl_opts:
+ # add additional kwargs to the Table if the dialect
+ # returned them
+ table._validate_dialect_kwargs(tbl_opts)
+
+ if util.py2k:
+ if isinstance(schema, str):
+ schema = schema.decode(dialect.encoding)
+ if isinstance(table_name, str):
+ table_name = table_name.decode(dialect.encoding)
+
+ found_table = False
+ cols_by_orig_name = {}
+
+ for col_d in self.get_columns(
+ table_name, schema, **table.dialect_kwargs
+ ):
+ found_table = True
+
+ self._reflect_column(
+ table,
+ col_d,
+ include_columns,
+ exclude_columns,
+ cols_by_orig_name,
+ )
+
+ # NOTE: support tables/views with no columns
+ if not found_table and not self.has_table(table_name, schema):
+ raise exc.NoSuchTableError(table_name)
+
+ self._reflect_pk(
+ table_name, schema, table, cols_by_orig_name, exclude_columns
+ )
+
+ self._reflect_fk(
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ resolve_fks,
+ _extend_on,
+ reflection_options,
+ )
+
+ self._reflect_indexes(
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ reflection_options,
+ )
+
+ self._reflect_unique_constraints(
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ reflection_options,
+ )
+
+ self._reflect_check_constraints(
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ reflection_options,
+ )
+
+ self._reflect_table_comment(
+ table_name, schema, table, reflection_options
+ )
+
+ def _reflect_column(
+ self, table, col_d, include_columns, exclude_columns, cols_by_orig_name
+ ):
+
+ orig_name = col_d["name"]
+
+ table.metadata.dispatch.column_reflect(self, table, col_d)
+ table.dispatch.column_reflect(self, table, col_d)
+
+ # fetch name again as column_reflect is allowed to
+ # change it
+ name = col_d["name"]
+ if (include_columns and name not in include_columns) or (
+ exclude_columns and name in exclude_columns
+ ):
+ return
+
+ coltype = col_d["type"]
+
+ col_kw = dict(
+ (k, col_d[k])
+ for k in [
+ "nullable",
+ "autoincrement",
+ "quote",
+ "info",
+ "key",
+ "comment",
+ ]
+ if k in col_d
+ )
+
+ if "dialect_options" in col_d:
+ col_kw.update(col_d["dialect_options"])
+
+ colargs = []
+ if col_d.get("default") is not None:
+ default = col_d["default"]
+ if isinstance(default, sql.elements.TextClause):
+ default = sa_schema.DefaultClause(default, _reflected=True)
+ elif not isinstance(default, sa_schema.FetchedValue):
+ default = sa_schema.DefaultClause(
+ sql.text(col_d["default"]), _reflected=True
+ )
+
+ colargs.append(default)
+
+ if "computed" in col_d:
+ computed = sa_schema.Computed(**col_d["computed"])
+ colargs.append(computed)
+
+ if "identity" in col_d:
+ computed = sa_schema.Identity(**col_d["identity"])
+ colargs.append(computed)
+
+ if "sequence" in col_d:
+ self._reflect_col_sequence(col_d, colargs)
+
+ cols_by_orig_name[orig_name] = col = sa_schema.Column(
+ name, coltype, *colargs, **col_kw
+ )
+
+ if col.key in table.primary_key:
+ col.primary_key = True
+ table.append_column(col, replace_existing=True)
+
+ def _reflect_col_sequence(self, col_d, colargs):
+ if "sequence" in col_d:
+ # TODO: mssql and sybase are using this.
+ seq = col_d["sequence"]
+ sequence = sa_schema.Sequence(seq["name"], 1, 1)
+ if "start" in seq:
+ sequence.start = seq["start"]
+ if "increment" in seq:
+ sequence.increment = seq["increment"]
+ colargs.append(sequence)
+
+ def _reflect_pk(
+ self, table_name, schema, table, cols_by_orig_name, exclude_columns
+ ):
+ pk_cons = self.get_pk_constraint(
+ table_name, schema, **table.dialect_kwargs
+ )
+ if pk_cons:
+ pk_cols = [
+ cols_by_orig_name[pk]
+ for pk in pk_cons["constrained_columns"]
+ if pk in cols_by_orig_name and pk not in exclude_columns
+ ]
+
+ # update pk constraint name
+ table.primary_key.name = pk_cons.get("name")
+
+ # tell the PKConstraint to re-initialize
+ # its column collection
+ table.primary_key._reload(pk_cols)
+
+ def _reflect_fk(
+ self,
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ resolve_fks,
+ _extend_on,
+ reflection_options,
+ ):
+ fkeys = self.get_foreign_keys(
+ table_name, schema, **table.dialect_kwargs
+ )
+ for fkey_d in fkeys:
+ conname = fkey_d["name"]
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_columns = [
+ cols_by_orig_name[c].key if c in cols_by_orig_name else c
+ for c in fkey_d["constrained_columns"]
+ ]
+
+ if (
+ exclude_columns
+ and set(constrained_columns).intersection(exclude_columns)
+ or (
+ include_columns
+ and set(constrained_columns).difference(include_columns)
+ )
+ ):
+ continue
+
+ referred_schema = fkey_d["referred_schema"]
+ referred_table = fkey_d["referred_table"]
+ referred_columns = fkey_d["referred_columns"]
+ refspec = []
+ if referred_schema is not None:
+ if resolve_fks:
+ sa_schema.Table(
+ referred_table,
+ table.metadata,
+ schema=referred_schema,
+ autoload_with=self.bind,
+ _extend_on=_extend_on,
+ **reflection_options
+ )
+ for column in referred_columns:
+ refspec.append(
+ ".".join([referred_schema, referred_table, column])
+ )
+ else:
+ if resolve_fks:
+ sa_schema.Table(
+ referred_table,
+ table.metadata,
+ autoload_with=self.bind,
+ schema=sa_schema.BLANK_SCHEMA,
+ _extend_on=_extend_on,
+ **reflection_options
+ )
+ for column in referred_columns:
+ refspec.append(".".join([referred_table, column]))
+ if "options" in fkey_d:
+ options = fkey_d["options"]
+ else:
+ options = {}
+
+ table.append_constraint(
+ sa_schema.ForeignKeyConstraint(
+ constrained_columns,
+ refspec,
+ conname,
+ link_to_name=True,
+ **options
+ )
+ )
+
+ _index_sort_exprs = [
+ ("asc", operators.asc_op),
+ ("desc", operators.desc_op),
+ ("nulls_first", operators.nulls_first_op),
+ ("nulls_last", operators.nulls_last_op),
+ ]
+
+ def _reflect_indexes(
+ self,
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ reflection_options,
+ ):
+ # Indexes
+ indexes = self.get_indexes(table_name, schema)
+ for index_d in indexes:
+ name = index_d["name"]
+ columns = index_d["column_names"]
+ column_sorting = index_d.get("column_sorting", {})
+ unique = index_d["unique"]
+ flavor = index_d.get("type", "index")
+ dialect_options = index_d.get("dialect_options", {})
+
+ duplicates = index_d.get("duplicates_constraint")
+ if include_columns and not set(columns).issubset(include_columns):
+ util.warn(
+ "Omitting %s key for (%s), key covers omitted columns."
+ % (flavor, ", ".join(columns))
+ )
+ continue
+ if duplicates:
+ continue
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ idx_cols = []
+ for c in columns:
+ try:
+ idx_col = (
+ cols_by_orig_name[c]
+ if c in cols_by_orig_name
+ else table.c[c]
+ )
+ except KeyError:
+ util.warn(
+ "%s key '%s' was not located in "
+ "columns for table '%s'" % (flavor, c, table_name)
+ )
+ continue
+ c_sorting = column_sorting.get(c, ())
+ for k, op in self._index_sort_exprs:
+ if k in c_sorting:
+ idx_col = op(idx_col)
+ idx_cols.append(idx_col)
+
+ sa_schema.Index(
+ name,
+ *idx_cols,
+ _table=table,
+ **dict(list(dialect_options.items()) + [("unique", unique)])
+ )
+
+ def _reflect_unique_constraints(
+ self,
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ reflection_options,
+ ):
+
+ # Unique Constraints
+ try:
+ constraints = self.get_unique_constraints(table_name, schema)
+ except NotImplementedError:
+ # optional dialect feature
+ return
+
+ for const_d in constraints:
+ conname = const_d["name"]
+ columns = const_d["column_names"]
+ duplicates = const_d.get("duplicates_index")
+ if include_columns and not set(columns).issubset(include_columns):
+ util.warn(
+ "Omitting unique constraint key for (%s), "
+ "key covers omitted columns." % ", ".join(columns)
+ )
+ continue
+ if duplicates:
+ continue
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_cols = []
+ for c in columns:
+ try:
+ constrained_col = (
+ cols_by_orig_name[c]
+ if c in cols_by_orig_name
+ else table.c[c]
+ )
+ except KeyError:
+ util.warn(
+ "unique constraint key '%s' was not located in "
+ "columns for table '%s'" % (c, table_name)
+ )
+ else:
+ constrained_cols.append(constrained_col)
+ table.append_constraint(
+ sa_schema.UniqueConstraint(*constrained_cols, name=conname)
+ )
+
+ def _reflect_check_constraints(
+ self,
+ table_name,
+ schema,
+ table,
+ cols_by_orig_name,
+ include_columns,
+ exclude_columns,
+ reflection_options,
+ ):
+ try:
+ constraints = self.get_check_constraints(table_name, schema)
+ except NotImplementedError:
+ # optional dialect feature
+ return
+
+ for const_d in constraints:
+ table.append_constraint(sa_schema.CheckConstraint(**const_d))
+
+ def _reflect_table_comment(
+ self, table_name, schema, table, reflection_options
+ ):
+ try:
+ comment_dict = self.get_table_comment(table_name, schema)
+ except NotImplementedError:
+ return
+ else:
+ table.comment = comment_dict.get("text", None)
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
new file mode 100644
index 0000000..1fd4e1c
--- /dev/null
+++ b/lib/sqlalchemy/engine/result.py
@@ -0,0 +1,1857 @@
+# engine/result.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Define generic result set constructs."""
+
+
+import functools
+import itertools
+import operator
+
+from .row import _baserow_usecext
+from .row import Row
+from .. import exc
+from .. import util
+from ..sql.base import _generative
+from ..sql.base import HasMemoized
+from ..sql.base import InPlaceGenerative
+from ..util import collections_abc
+from ..util import py2k
+
+
+if _baserow_usecext:
+ from sqlalchemy.cresultproxy import tuplegetter
+
+ _row_as_tuple = tuplegetter
+else:
+
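+    # pure-Python fallback for the C ``tuplegetter``: unlike a plain
+    # operator.itemgetter, it always returns a tuple, even when only a
+    # single index is given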
+ def tuplegetter(*indexes):
+ it = operator.itemgetter(*indexes)
+
+ if len(indexes) > 1:
+ return it
+ else:
+ return lambda row: (it(row),)
+
+ def _row_as_tuple(*indexes):
+ # circumvent LegacyRow.__getitem__ pointing to
+ # _get_by_key_impl_mapping for now. otherwise we could
+ # use itemgetter
+ getters = [
+ operator.methodcaller("_get_by_int_impl", index)
+ for index in indexes
+ ]
+ return lambda rec: tuple([getter(rec) for getter in getters])
+
+
+class ResultMetaData(object):
+ """Base for metadata about result rows."""
+
+ __slots__ = ()
+
+ _tuplefilter = None
+ _translated_indexes = None
+ _unique_filters = None
+
+ @property
+ def keys(self):
+ return RMKeyView(self)
+
+ def _has_key(self, key):
+ raise NotImplementedError()
+
+ def _for_freeze(self):
+ raise NotImplementedError()
+
+ def _key_fallback(self, key, err, raiseerr=True):
+ assert raiseerr
+ util.raise_(KeyError(key), replace_context=err)
+
+ def _warn_for_nonint(self, key):
+ util.warn_deprecated_20(
+ "Retrieving row members using strings or other non-integers is "
+ "deprecated; use row._mapping for a dictionary interface "
+ "to the row"
+ )
+
+ def _raise_for_nonint(self, key):
+ raise TypeError(
+ "TypeError: tuple indices must be integers or slices, not %s"
+ % type(key).__name__
+ )
+
+ def _index_for_key(self, keys, raiseerr):
+ raise NotImplementedError()
+
+ def _metadata_for_keys(self, key):
+ raise NotImplementedError()
+
+ def _reduce(self, keys):
+ raise NotImplementedError()
+
+ def _getter(self, key, raiseerr=True):
+
+ index = self._index_for_key(key, raiseerr)
+
+ if index is not None:
+ return operator.itemgetter(index)
+ else:
+ return None
+
+ def _row_as_tuple_getter(self, keys):
+ indexes = self._indexes_for_keys(keys)
+ return _row_as_tuple(*indexes)
+
+
+class RMKeyView(collections_abc.KeysView):
+ __slots__ = ("_parent", "_keys")
+
+ def __init__(self, parent):
+ self._parent = parent
+ self._keys = [k for k in parent._keys if k is not None]
+
+ def __len__(self):
+ return len(self._keys)
+
+ def __repr__(self):
+ return "{0.__class__.__name__}({0._keys!r})".format(self)
+
+ def __iter__(self):
+ return iter(self._keys)
+
+ def __contains__(self, item):
+ if not _baserow_usecext and isinstance(item, int):
+ return False
+
+ # note this also includes special key fallback behaviors
+ # which also don't seem to be tested in test_resultset right now
+ return self._parent._has_key(item)
+
+ def __eq__(self, other):
+ return list(other) == list(self)
+
+ def __ne__(self, other):
+ return list(other) != list(self)
+
+
+class SimpleResultMetaData(ResultMetaData):
+ """result metadata for in-memory collections."""
+
+ __slots__ = (
+ "_keys",
+ "_keymap",
+ "_processors",
+ "_tuplefilter",
+ "_translated_indexes",
+ "_unique_filters",
+ )
+
+ def __init__(
+ self,
+ keys,
+ extra=None,
+ _processors=None,
+ _tuplefilter=None,
+ _translated_indexes=None,
+ _unique_filters=None,
+ ):
+ self._keys = list(keys)
+ self._tuplefilter = _tuplefilter
+ self._translated_indexes = _translated_indexes
+ self._unique_filters = _unique_filters
+
+ if extra:
+ recs_names = [
+ (
+ (name,) + extras,
+ (index, name, extras),
+ )
+ for index, (name, extras) in enumerate(zip(self._keys, extra))
+ ]
+ else:
+ recs_names = [
+ ((name,), (index, name, ()))
+ for index, name in enumerate(self._keys)
+ ]
+
+ self._keymap = {key: rec for keys, rec in recs_names for key in keys}
+
+ self._processors = _processors
+
+ def _has_key(self, key):
+ return key in self._keymap
+
+ def _for_freeze(self):
+ unique_filters = self._unique_filters
+ if unique_filters and self._tuplefilter:
+ unique_filters = self._tuplefilter(unique_filters)
+
+ # TODO: are we freezing the result with or without uniqueness
+ # applied?
+ return SimpleResultMetaData(
+ self._keys,
+ extra=[self._keymap[key][2] for key in self._keys],
+ _unique_filters=unique_filters,
+ )
+
+ def __getstate__(self):
+ return {
+ "_keys": self._keys,
+ "_translated_indexes": self._translated_indexes,
+ }
+
+ def __setstate__(self, state):
+ if state["_translated_indexes"]:
+ _translated_indexes = state["_translated_indexes"]
+ _tuplefilter = tuplegetter(*_translated_indexes)
+ else:
+ _translated_indexes = _tuplefilter = None
+ self.__init__(
+ state["_keys"],
+ _translated_indexes=_translated_indexes,
+ _tuplefilter=_tuplefilter,
+ )
+
+ def _contains(self, value, row):
+ return value in row._data
+
+ def _index_for_key(self, key, raiseerr=True):
+ if int in key.__class__.__mro__:
+ key = self._keys[key]
+ try:
+ rec = self._keymap[key]
+ except KeyError as ke:
+ rec = self._key_fallback(key, ke, raiseerr)
+
+ return rec[0]
+
+ def _indexes_for_keys(self, keys):
+ return [self._keymap[key][0] for key in keys]
+
+ def _metadata_for_keys(self, keys):
+ for key in keys:
+ if int in key.__class__.__mro__:
+ key = self._keys[key]
+
+ try:
+ rec = self._keymap[key]
+ except KeyError as ke:
+ rec = self._key_fallback(key, ke, True)
+
+ yield rec
+
+ def _reduce(self, keys):
+ try:
+ metadata_for_keys = [
+ self._keymap[
+ self._keys[key] if int in key.__class__.__mro__ else key
+ ]
+ for key in keys
+ ]
+ except KeyError as ke:
+ self._key_fallback(ke.args[0], ke, True)
+
+ indexes, new_keys, extra = zip(*metadata_for_keys)
+
+ if self._translated_indexes:
+ indexes = [self._translated_indexes[idx] for idx in indexes]
+
+ tup = tuplegetter(*indexes)
+
+ new_metadata = SimpleResultMetaData(
+ new_keys,
+ extra=extra,
+ _tuplefilter=tup,
+ _translated_indexes=indexes,
+ _processors=self._processors,
+ _unique_filters=self._unique_filters,
+ )
+
+ return new_metadata
+
+
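+# result_tuple() returns a callable that builds Row objects from plain
+# tuples; e.g. (illustrative) ``make_row = result_tuple(["a", "b"])``
+# followed by ``make_row((1, 2))`` produces a Row keyed on "a" and "b".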
+def result_tuple(fields, extra=None):
+ parent = SimpleResultMetaData(fields, extra)
+ return functools.partial(
+ Row, parent, parent._processors, parent._keymap, Row._default_key_style
+ )
+
+
+# a symbol that indicates to internal Result methods that
+# "no row is returned". We can't use None for those cases where a scalar
+# filter is applied to rows.
+_NO_ROW = util.symbol("NO_ROW")
+
+
+class ResultInternal(InPlaceGenerative):
+ _real_result = None
+ _generate_rows = True
+ _unique_filter_state = None
+ _post_creational_filter = None
+ _is_cursor = False
+
+ @HasMemoized.memoized_attribute
+ def _row_getter(self):
+ real_result = self._real_result if self._real_result else self
+
+ if real_result._source_supports_scalars:
+ if not self._generate_rows:
+ return None
+ else:
+ _proc = real_result._process_row
+
+ def process_row(
+ metadata, processors, keymap, key_style, scalar_obj
+ ):
+ return _proc(
+ metadata, processors, keymap, key_style, (scalar_obj,)
+ )
+
+ else:
+ process_row = real_result._process_row
+
+ key_style = real_result._process_row._default_key_style
+ metadata = self._metadata
+
+ keymap = metadata._keymap
+ processors = metadata._processors
+ tf = metadata._tuplefilter
+
+ if tf and not real_result._source_supports_scalars:
+ if processors:
+ processors = tf(processors)
+
+ _make_row_orig = functools.partial(
+ process_row, metadata, processors, keymap, key_style
+ )
+
+ def make_row(row):
+ return _make_row_orig(tf(row))
+
+ else:
+ make_row = functools.partial(
+ process_row, metadata, processors, keymap, key_style
+ )
+
+        if real_result._row_logging_fn:
+            fns = (real_result._row_logging_fn,)
+        else:
+            fns = ()
+
+ if fns:
+ _make_row = make_row
+
+ def make_row(row):
+ row = _make_row(row)
+ for fn in fns:
+ row = fn(row)
+ return row
+
+ return make_row
+
+ @HasMemoized.memoized_attribute
+ def _iterator_getter(self):
+
+ make_row = self._row_getter
+
+ post_creational_filter = self._post_creational_filter
+
+ if self._unique_filter_state:
+ uniques, strategy = self._unique_strategy
+
+ def iterrows(self):
+ for row in self._fetchiter_impl():
+ obj = make_row(row) if make_row else row
+ hashed = strategy(obj) if strategy else obj
+ if hashed in uniques:
+ continue
+ uniques.add(hashed)
+ if post_creational_filter:
+ obj = post_creational_filter(obj)
+ yield obj
+
+ else:
+
+ def iterrows(self):
+ for row in self._fetchiter_impl():
+ row = make_row(row) if make_row else row
+ if post_creational_filter:
+ row = post_creational_filter(row)
+ yield row
+
+ return iterrows
+
+ def _raw_all_rows(self):
+ make_row = self._row_getter
+ rows = self._fetchall_impl()
+ return [make_row(row) for row in rows]
+
+ def _allrows(self):
+
+ post_creational_filter = self._post_creational_filter
+
+ make_row = self._row_getter
+
+ rows = self._fetchall_impl()
+ if make_row:
+ made_rows = [make_row(row) for row in rows]
+ else:
+ made_rows = rows
+
+ if self._unique_filter_state:
+ uniques, strategy = self._unique_strategy
+
+ rows = [
+ made_row
+ for made_row, sig_row in [
+ (
+ made_row,
+ strategy(made_row) if strategy else made_row,
+ )
+ for made_row in made_rows
+ ]
+ if sig_row not in uniques and not uniques.add(sig_row)
+ ]
+ else:
+ rows = made_rows
+
+ if post_creational_filter:
+ rows = [post_creational_filter(row) for row in rows]
+ return rows
+
+ @HasMemoized.memoized_attribute
+ def _onerow_getter(self):
+ make_row = self._row_getter
+
+ post_creational_filter = self._post_creational_filter
+
+ if self._unique_filter_state:
+ uniques, strategy = self._unique_strategy
+
+ def onerow(self):
+ _onerow = self._fetchone_impl
+ while True:
+ row = _onerow()
+ if row is None:
+ return _NO_ROW
+ else:
+ obj = make_row(row) if make_row else row
+ hashed = strategy(obj) if strategy else obj
+ if hashed in uniques:
+ continue
+ else:
+ uniques.add(hashed)
+ if post_creational_filter:
+ obj = post_creational_filter(obj)
+ return obj
+
+ else:
+
+ def onerow(self):
+ row = self._fetchone_impl()
+ if row is None:
+ return _NO_ROW
+ else:
+ row = make_row(row) if make_row else row
+ if post_creational_filter:
+ row = post_creational_filter(row)
+ return row
+
+ return onerow
+
+ @HasMemoized.memoized_attribute
+ def _manyrow_getter(self):
+ make_row = self._row_getter
+
+ post_creational_filter = self._post_creational_filter
+
+ if self._unique_filter_state:
+ uniques, strategy = self._unique_strategy
+
+ def filterrows(make_row, rows, strategy, uniques):
+ if make_row:
+ rows = [make_row(row) for row in rows]
+
+ if strategy:
+ made_rows = (
+ (made_row, strategy(made_row)) for made_row in rows
+ )
+ else:
+ made_rows = ((made_row, made_row) for made_row in rows)
+ return [
+ made_row
+ for made_row, sig_row in made_rows
+ if sig_row not in uniques and not uniques.add(sig_row)
+ ]
+
+ def manyrows(self, num):
+ collect = []
+
+ _manyrows = self._fetchmany_impl
+
+ if num is None:
+                # if None is passed, we don't know the default
+                # manyrows number; the DBAPI has this as cursor.arraysize,
+                # and different DBAPIs / fetch strategies may differ.
+                # do a fetch to find what the number is. if fewer rows
+                # remain, then it doesn't matter.
+ real_result = (
+ self._real_result if self._real_result else self
+ )
+ if real_result._yield_per:
+ num_required = num = real_result._yield_per
+ else:
+ rows = _manyrows(num)
+ num = len(rows)
+ collect.extend(
+ filterrows(make_row, rows, strategy, uniques)
+ )
+ num_required = num - len(collect)
+ else:
+ num_required = num
+
+ while num_required:
+ rows = _manyrows(num_required)
+ if not rows:
+ break
+
+ collect.extend(
+ filterrows(make_row, rows, strategy, uniques)
+ )
+ num_required = num - len(collect)
+
+ if post_creational_filter:
+ collect = [post_creational_filter(row) for row in collect]
+ return collect
+
+ else:
+
+ def manyrows(self, num):
+ if num is None:
+ real_result = (
+ self._real_result if self._real_result else self
+ )
+ num = real_result._yield_per
+
+ rows = self._fetchmany_impl(num)
+ if make_row:
+ rows = [make_row(row) for row in rows]
+ if post_creational_filter:
+ rows = [post_creational_filter(row) for row in rows]
+ return rows
+
+ return manyrows
+
+ def _only_one_row(
+ self,
+ raise_for_second_row,
+ raise_for_none,
+ scalar,
+ ):
+ onerow = self._fetchone_impl
+
+ row = onerow(hard_close=True)
+ if row is None:
+ if raise_for_none:
+ raise exc.NoResultFound(
+ "No row was found when one was required"
+ )
+ else:
+ return None
+
+ if scalar and self._source_supports_scalars:
+ self._generate_rows = False
+ make_row = None
+ else:
+ make_row = self._row_getter
+
+ try:
+ row = make_row(row) if make_row else row
+ except:
+ self._soft_close(hard=True)
+ raise
+
+ if raise_for_second_row:
+ if self._unique_filter_state:
+ # for no second row but uniqueness, need to essentially
+ # consume the entire result :(
+ uniques, strategy = self._unique_strategy
+
+ existing_row_hash = strategy(row) if strategy else row
+
+ while True:
+ next_row = onerow(hard_close=True)
+ if next_row is None:
+ next_row = _NO_ROW
+ break
+
+ try:
+ next_row = make_row(next_row) if make_row else next_row
+
+ if strategy:
+ if existing_row_hash == strategy(next_row):
+ continue
+ elif row == next_row:
+ continue
+ # here, we have a row and it's different
+ break
+ except:
+ self._soft_close(hard=True)
+ raise
+ else:
+ next_row = onerow(hard_close=True)
+ if next_row is None:
+ next_row = _NO_ROW
+
+ if next_row is not _NO_ROW:
+ self._soft_close(hard=True)
+ raise exc.MultipleResultsFound(
+ "Multiple rows were found when exactly one was required"
+ if raise_for_none
+ else "Multiple rows were found when one or none "
+ "was required"
+ )
+ else:
+ next_row = _NO_ROW
+ # if we checked for second row then that would have
+ # closed us :)
+ self._soft_close(hard=True)
+
+ if not scalar:
+ post_creational_filter = self._post_creational_filter
+ if post_creational_filter:
+ row = post_creational_filter(row)
+
+ if scalar and make_row:
+ return row[0]
+ else:
+ return row
+
+ def _iter_impl(self):
+ return self._iterator_getter(self)
+
+ def _next_impl(self):
+ row = self._onerow_getter(self)
+ if row is _NO_ROW:
+ raise StopIteration()
+ else:
+ return row
+
+ @_generative
+ def _column_slices(self, indexes):
+ real_result = self._real_result if self._real_result else self
+
+ if real_result._source_supports_scalars and len(indexes) == 1:
+ util.warn_deprecated(
+ "The Result.columns() method has a bug in SQLAlchemy 1.4 that "
+ "is causing it to yield scalar values, rather than Row "
+ "objects, in the case where a single index is passed and the "
+ "result is against ORM mapped objects. In SQLAlchemy 2.0, "
+ "Result will continue yield Row objects in this scenario. "
+ "Use the Result.scalars() method to yield scalar values.",
+ "2.0",
+ )
+ self._generate_rows = False
+ else:
+ self._generate_rows = True
+ self._metadata = self._metadata._reduce(indexes)
+
+ @HasMemoized.memoized_attribute
+ def _unique_strategy(self):
+ uniques, strategy = self._unique_filter_state
+
+ real_result = (
+ self._real_result if self._real_result is not None else self
+ )
+
+ if not strategy and self._metadata._unique_filters:
+ if (
+ real_result._source_supports_scalars
+ and not self._generate_rows
+ ):
+ strategy = self._metadata._unique_filters[0]
+ else:
+ filters = self._metadata._unique_filters
+ if self._metadata._tuplefilter:
+ filters = self._metadata._tuplefilter(filters)
+
+ strategy = operator.methodcaller("_filter_on_values", filters)
+ return uniques, strategy
+
+
+class _WithKeys(object):
+ # used mainly to share documentation on the keys method.
+ # py2k does not allow overriding the __doc__ attribute.
+ def keys(self):
+ """Return an iterable view which yields the string keys that would
+ be represented by each :class:`.Row`.
+
+ The keys can represent the labels of the columns returned by a core
+ statement or the names of the orm classes returned by an orm
+ execution.
+
+ The view also can be tested for key containment using the Python
+ ``in`` operator, which will test both for the string keys represented
+ in the view, as well as for alternate keys such as column objects.
+
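+        A minimal sketch, assuming a statement that selects columns
+        ``x`` and ``y``::
+
+            result = conn.execute(statement)
+            assert list(result.keys()) == ["x", "y"]
+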
+ .. versionchanged:: 1.4 a key view object is returned rather than a
+ plain list.
+
+
+ """
+ return self._metadata.keys
+
+
+class Result(_WithKeys, ResultInternal):
+ """Represent a set of database results.
+
+ .. versionadded:: 1.4 The :class:`.Result` object provides a completely
+ updated usage model and calling facade for SQLAlchemy Core and
+ SQLAlchemy ORM. In Core, it forms the basis of the
+ :class:`.CursorResult` object which replaces the previous
+ :class:`.ResultProxy` interface. When using the ORM, a higher level
+ object called :class:`.ChunkedIteratorResult` is normally used.
+
+ .. note:: In SQLAlchemy 1.4 and above, this object is
+ used for ORM results returned by :meth:`_orm.Session.execute`, which can
+ yield instances of ORM mapped objects either individually or within
+ tuple-like rows. Note that the :class:`_result.Result` object does not
+ deduplicate instances or rows automatically as is the case with the
+ legacy :class:`_orm.Query` object. For in-Python de-duplication of
+ instances or rows, use the :meth:`_result.Result.unique` modifier
+ method.
+
+ .. seealso::
+
+ :ref:`tutorial_fetching_rows` - in the :doc:`/tutorial/index`
+
+ """
+
+ _process_row = Row
+
+ _row_logging_fn = None
+
+ _source_supports_scalars = False
+
+ _yield_per = None
+
+ _attributes = util.immutabledict()
+
+ def __init__(self, cursor_metadata):
+ self._metadata = cursor_metadata
+
+ def _soft_close(self, hard=False):
+ raise NotImplementedError()
+
+ def close(self):
+ """close this :class:`_result.Result`.
+
+ The behavior of this method is implementation specific, and is
+        not implemented by default. The method should generally release
+        the resources in use by the result object and also cause any
+ subsequent iteration or row fetching to raise
+ :class:`.ResourceClosedError`.
+
+ .. versionadded:: 1.4.27 - ``.close()`` was previously not generally
+ available for all :class:`_result.Result` classes, instead only
+ being available on the :class:`_engine.CursorResult` returned for
+ Core statement executions. As most other result objects, namely the
+ ones used by the ORM, are proxying a :class:`_engine.CursorResult`
+ in any case, this allows the underlying cursor result to be closed
+ from the outside facade for the case when the ORM query is using
+ the ``yield_per`` execution option where it does not immediately
+ exhaust and autoclose the database cursor.
+
+ """
+ self._soft_close(hard=True)
+
+ @_generative
+ def yield_per(self, num):
+ """Configure the row-fetching strategy to fetch ``num`` rows at a time.
+
+ This impacts the underlying behavior of the result when iterating over
+ the result object, or otherwise making use of methods such as
+ :meth:`_engine.Result.fetchone` that return one row at a time. Data
+ from the underlying cursor or other data source will be buffered up to
+ this many rows in memory, and the buffered collection will then be
+        yielded out one row at a time or as many rows as are requested. Each
+        time the buffer clears, it will be refreshed to this many rows, or as
+        many rows as remain if fewer remain.
+
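+        A minimal sketch, assuming a ``statement`` object and a DBAPI
+        that supports server side cursors::
+
+            with engine.connect() as conn:
+                result = conn.execution_options(stream_results=True).execute(
+                    statement
+                )
+                for row in result.yield_per(100):
+                    ...
+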
+ The :meth:`_engine.Result.yield_per` method is generally used in
+ conjunction with the
+ :paramref:`_engine.Connection.execution_options.stream_results`
+ execution option, which will allow the database dialect in use to make
+ use of a server side cursor, if the DBAPI supports a specific "server
+ side cursor" mode separate from its default mode of operation.
+
+ .. tip::
+
+ Consider using the
+ :paramref:`_engine.Connection.execution_options.yield_per`
+ execution option, which will simultaneously set
+ :paramref:`_engine.Connection.execution_options.stream_results`
+ to ensure the use of server side cursors, as well as automatically
+ invoke the :meth:`_engine.Result.yield_per` method to establish
+ a fixed row buffer size at once.
+
+ The :paramref:`_engine.Connection.execution_options.yield_per`
+ execution option is available for ORM operations, with
+ :class:`_orm.Session`-oriented use described at
+ :ref:`orm_queryguide_yield_per`. The Core-only version which works
+ with :class:`_engine.Connection` is new as of SQLAlchemy 1.4.40.
+
+ .. versionadded:: 1.4
+
+ :param num: number of rows to fetch each time the buffer is refilled.
+ If set to a value below 1, fetches all rows for the next buffer.
+
+ .. seealso::
+
+ :ref:`engine_stream_results` - describes Core behavior for
+ :meth:`_engine.Result.yield_per`
+
+ :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+ """
+ self._yield_per = num
+
+ @_generative
+ def unique(self, strategy=None):
+ """Apply unique filtering to the objects returned by this
+ :class:`_engine.Result`.
+
+        When this filter is applied with no arguments, the rows or objects
+        returned will be filtered such that each row is returned uniquely. The
+        algorithm used to determine this uniqueness is by default the Python
+        hashing identity of the whole tuple. In some cases a specialized
+        per-entity hashing scheme may be used; for example, when using the
+        ORM, a scheme is applied which works against the primary key identity
+        of returned objects.
+
+ The unique filter is applied **after all other filters**, which means
+ if the columns returned have been refined using a method such as the
+ :meth:`_engine.Result.columns` or :meth:`_engine.Result.scalars`
+ method, the uniquing is applied to **only the column or columns
+ returned**. This occurs regardless of the order in which these
+ methods have been called upon the :class:`_engine.Result` object.
+
+ The unique filter also changes the calculus used for methods like
+ :meth:`_engine.Result.fetchmany` and :meth:`_engine.Result.partitions`.
+ When using :meth:`_engine.Result.unique`, these methods will continue
+ to yield the number of rows or objects requested, after uniquing
+ has been applied. However, this necessarily impacts the buffering
+ behavior of the underlying cursor or datasource, such that multiple
+ underlying calls to ``cursor.fetchmany()`` may be necessary in order
+ to accumulate enough objects in order to provide a unique collection
+ of the requested size.
+
+ :param strategy: a callable that will be applied to rows or objects
+ being iterated, which should return an object that represents the
+ unique value of the row. A Python ``set()`` is used to store
+ these identities. If not passed, a default uniqueness strategy
+ is used which may have been assembled by the source of this
+ :class:`_engine.Result` object.
+
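+        E.g., a minimal sketch using the default uniquing strategy::
+
+            result = conn.execute(statement)
+            rows = result.unique().all()  # duplicate rows removed
+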
+ """
+ self._unique_filter_state = (set(), strategy)
+
+ def columns(self, *col_expressions):
+ r"""Establish the columns that should be returned in each row.
+
+ This method may be used to limit the columns returned as well
+ as to reorder them. The given list of expressions are normally
+ a series of integers or string key names. They may also be
+ appropriate :class:`.ColumnElement` objects which correspond to
+ a given statement construct.
+
+ E.g.::
+
+ statement = select(table.c.x, table.c.y, table.c.z)
+ result = connection.execute(statement)
+
+ for z, y in result.columns('z', 'y'):
+ # ...
+
+
+ Example of using the column objects from the statement itself::
+
+ for z, y in result.columns(
+ statement.selected_columns.c.z,
+ statement.selected_columns.c.y
+ ):
+ # ...
+
+ .. versionadded:: 1.4
+
+ :param \*col_expressions: indicates columns to be returned. Elements
+ may be integer row indexes, string column names, or appropriate
+ :class:`.ColumnElement` objects corresponding to a select construct.
+
+ :return: this :class:`_engine.Result` object with the modifications
+ given.
+
+ """
+ return self._column_slices(col_expressions)
+
+ def scalars(self, index=0):
+ """Return a :class:`_result.ScalarResult` filtering object which
+ will return single elements rather than :class:`_row.Row` objects.
+
+ E.g.::
+
+ >>> result = conn.execute(text("select int_id from table"))
+ >>> result.scalars().all()
+ [1, 2, 3]
+
+ When results are fetched from the :class:`_result.ScalarResult`
+ filtering object, the single column-row that would be returned by the
+ :class:`_result.Result` is instead returned as the column's value.
+
+ .. versionadded:: 1.4
+
+ :param index: integer or row key indicating the column to be fetched
+ from each row, defaults to ``0`` indicating the first column.
+
+ :return: a new :class:`_result.ScalarResult` filtering object referring
+ to this :class:`_result.Result` object.
+
+ """
+ return ScalarResult(self, index)
+
+ def _getter(self, key, raiseerr=True):
+ """return a callable that will retrieve the given key from a
+ :class:`.Row`.
+
+ """
+ if self._source_supports_scalars:
+ raise NotImplementedError(
+ "can't use this function in 'only scalars' mode"
+ )
+ return self._metadata._getter(key, raiseerr)
+
+ def _tuple_getter(self, keys):
+ """return a callable that will retrieve the given keys from a
+ :class:`.Row`.
+
+ """
+ if self._source_supports_scalars:
+ raise NotImplementedError(
+ "can't use this function in 'only scalars' mode"
+ )
+ return self._metadata._row_as_tuple_getter(keys)
+
+ def mappings(self):
+ """Apply a mappings filter to returned rows, returning an instance of
+ :class:`_result.MappingResult`.
+
+ When this filter is applied, fetching rows will return
+ :class:`.RowMapping` objects instead of :class:`.Row` objects.
+
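+        E.g., a minimal sketch, assuming a hypothetical ``user_table``
+        with ``id`` and ``name`` columns::
+
+            result = conn.execute(select(user_table))
+            for row_mapping in result.mappings():
+                print(row_mapping["id"], row_mapping["name"])
+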
+ .. versionadded:: 1.4
+
+ :return: a new :class:`_result.MappingResult` filtering object
+ referring to this :class:`_result.Result` object.
+
+ """
+
+ return MappingResult(self)
+
+ def _raw_row_iterator(self):
+ """Return a safe iterator that yields raw row data.
+
+        This is used by the :meth:`_engine.Result.merge` method
+ to merge multiple compatible results together.
+
+ """
+ raise NotImplementedError()
+
+ def _fetchiter_impl(self):
+ raise NotImplementedError()
+
+ def _fetchone_impl(self, hard_close=False):
+ raise NotImplementedError()
+
+ def _fetchall_impl(self):
+ raise NotImplementedError()
+
+ def _fetchmany_impl(self, size=None):
+ raise NotImplementedError()
+
+ def __iter__(self):
+ return self._iter_impl()
+
+ def __next__(self):
+ return self._next_impl()
+
+ if py2k:
+
+ def next(self): # noqa
+ return self._next_impl()
+
+ def partitions(self, size=None):
+ """Iterate through sub-lists of rows of the size given.
+
+        Each list will be of the size given, excluding the last list to
+        be yielded, which may contain fewer rows. No empty
+        lists will be yielded.
+
+ The result object is automatically closed when the iterator
+ is fully consumed.
+
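+        E.g., a minimal sketch, where ``process`` is a hypothetical
+        callable::
+
+            with engine.connect() as conn:
+                result = conn.execute(statement)
+                for partition in result.partitions(100):
+                    process(partition)  # lists of up to 100 rows each
+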
+ Note that the backend driver will usually buffer the entire result
+ ahead of time unless the
+ :paramref:`.Connection.execution_options.stream_results` execution
+ option is used indicating that the driver should not pre-buffer
+ results, if possible. Not all drivers support this option and
+        the option is silently ignored for those that do not.
+
+ When using the ORM, the :meth:`_engine.Result.partitions` method
+ is typically more effective from a memory perspective when it is
+ combined with use of the
+ :ref:`yield_per execution option <orm_queryguide_yield_per>`,
+ which instructs both the DBAPI driver to use server side cursors,
+ if available, as well as instructs the ORM loading internals to only
+ build a certain amount of ORM objects from a result at a time before
+ yielding them out.
+
+ .. versionadded:: 1.4
+
+ :param size: indicate the maximum number of rows to be present
+ in each list yielded. If None, makes use of the value set by
+          the :meth:`_engine.Result.yield_per` method, if it was called,
+          or the :paramref:`_engine.Connection.execution_options.yield_per`
+          execution option, which is equivalent in this regard. If
+          yield_per was not set, it makes use of the
+ :meth:`_engine.Result.fetchmany` default, which may be backend
+ specific and not well defined.
+
+ :return: iterator of lists
+
+ .. seealso::
+
+ :ref:`engine_stream_results`
+
+ :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+
+ """
+
+ getter = self._manyrow_getter
+
+ while True:
+ partition = getter(self, size)
+ if partition:
+ yield partition
+ else:
+ break
+
+ def fetchall(self):
+ """A synonym for the :meth:`_engine.Result.all` method."""
+
+ return self._allrows()
+
+ def fetchone(self):
+ """Fetch one row.
+
+ When all rows are exhausted, returns None.
+
+ This method is provided for backwards compatibility with
+ SQLAlchemy 1.x.x.
+
+ To fetch the first row of a result only, use the
+ :meth:`_engine.Result.first` method. To iterate through all
+ rows, iterate the :class:`_engine.Result` object directly.
+
+ :return: a :class:`.Row` object if no filters are applied, or None
+ if no rows remain.
+
+ """
+ row = self._onerow_getter(self)
+ if row is _NO_ROW:
+ return None
+ else:
+ return row
+
+ def fetchmany(self, size=None):
+ """Fetch many rows.
+
+ When all rows are exhausted, returns an empty list.
+
+ This method is provided for backwards compatibility with
+ SQLAlchemy 1.x.x.
+
+        To fetch rows in groups, use the :meth:`_result.Result.partitions`
+ method.
+
+ :return: a list of :class:`.Row` objects.
+
+ """
+
+ return self._manyrow_getter(self, size)
+
+ def all(self):
+ """Return all rows in a list.
+
+ Closes the result set after invocation. Subsequent invocations
+ will return an empty list.
+
+ .. versionadded:: 1.4
+
+ :return: a list of :class:`.Row` objects.
+
+ """
+
+ return self._allrows()
+
+ def first(self):
+ """Fetch the first row or None if no row is present.
+
+ Closes the result set and discards remaining rows.
+
+ .. note:: This method returns one **row**, e.g. tuple, by default.
+ To return exactly one single scalar value, that is, the first
+ column of the first row, use the :meth:`.Result.scalar` method,
+ or combine :meth:`.Result.scalars` and :meth:`.Result.first`.
+
+ Additionally, in contrast to the behavior of the legacy ORM
+ :meth:`_orm.Query.first` method, **no limit is applied** to the
+ SQL query which was invoked to produce this :class:`_engine.Result`;
+ for a DBAPI driver that buffers results in memory before yielding
+ rows, all rows will be sent to the Python process and all but
+ the first row will be discarded.
+
+ .. seealso::
+
+ :ref:`migration_20_unify_select`
+
+ :return: a :class:`.Row` object, or None
+ if no rows remain.
+
+ .. seealso::
+
+ :meth:`_result.Result.scalar`
+
+ :meth:`_result.Result.one`
+
+ """
+
+ return self._only_one_row(
+ raise_for_second_row=False, raise_for_none=False, scalar=False
+ )
+
+ def one_or_none(self):
+ """Return at most one result or raise an exception.
+
+ Returns ``None`` if the result has no rows.
+ Raises :class:`.MultipleResultsFound`
+ if multiple rows are returned.
+
+ .. versionadded:: 1.4
+
+ :return: The first :class:`.Row` or None if no row is available.
+
+ :raises: :class:`.MultipleResultsFound`
+
+ .. seealso::
+
+ :meth:`_result.Result.first`
+
+ :meth:`_result.Result.one`
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=False, scalar=False
+ )
+
+ def scalar_one(self):
+ """Return exactly one scalar result or raise an exception.
+
+ This is equivalent to calling :meth:`.Result.scalars` and then
+ :meth:`.Result.one`.
+
+ .. seealso::
+
+ :meth:`.Result.one`
+
+ :meth:`.Result.scalars`
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=True, scalar=True
+ )
+
+ def scalar_one_or_none(self):
+ """Return exactly one or no scalar result.
+
+ This is equivalent to calling :meth:`.Result.scalars` and then
+ :meth:`.Result.one_or_none`.
+
+ .. seealso::
+
+ :meth:`.Result.one_or_none`
+
+ :meth:`.Result.scalars`
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=False, scalar=True
+ )
+
+ def one(self):
+ """Return exactly one row or raise an exception.
+
+ Raises :class:`.NoResultFound` if the result returns no
+ rows, or :class:`.MultipleResultsFound` if multiple rows
+ would be returned.
+
+ .. note:: This method returns one **row**, e.g. tuple, by default.
+ To return exactly one single scalar value, that is, the first
+ column of the first row, use the :meth:`.Result.scalar_one` method,
+ or combine :meth:`.Result.scalars` and :meth:`.Result.one`.
+
+ .. versionadded:: 1.4
+
+ :return: The first :class:`.Row`.
+
+ :raises: :class:`.MultipleResultsFound`, :class:`.NoResultFound`
+
+ .. seealso::
+
+ :meth:`_result.Result.first`
+
+ :meth:`_result.Result.one_or_none`
+
+ :meth:`_result.Result.scalar_one`
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=True, scalar=False
+ )
+
+ def scalar(self):
+ """Fetch the first column of the first row, and close the result set.
+
+ Returns None if there are no rows to fetch.
+
+ No validation is performed to test if additional rows remain.
+
+ After calling this method, the object is fully closed,
+ e.g. the :meth:`_engine.CursorResult.close`
+ method will have been called.
+
+        :return: a Python scalar value, or None if no rows remain.
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=False, raise_for_none=False, scalar=True
+ )
+
+ def freeze(self):
+ """Return a callable object that will produce copies of this
+ :class:`.Result` when invoked.
+
+ The callable object returned is an instance of
+ :class:`_engine.FrozenResult`.
+
+ This is used for result set caching. The method must be called
+ on the result when it has been unconsumed, and calling the method
+ will consume the result fully. When the :class:`_engine.FrozenResult`
+ is retrieved from a cache, it can be called any number of times where
+ it will produce a new :class:`_engine.Result` object each time
+ against its stored set of rows.
+
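+        A minimal sketch::
+
+            frozen = result.freeze()   # consumes the result fully
+            result_one = frozen()      # new Result against the stored rows
+            result_two = frozen()      # may be called any number of times
+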
+ .. seealso::
+
+ :ref:`do_orm_execute_re_executing` - example usage within the
+ ORM to implement a result-set cache.
+
+ """
+
+ return FrozenResult(self)
+
+ def merge(self, *others):
+ """Merge this :class:`.Result` with other compatible result
+ objects.
+
+ The object returned is an instance of :class:`_engine.MergedResult`,
+ which will be composed of iterators from the given result
+ objects.
+
+ The new result will use the metadata from this result object.
+ The subsequent result objects must be against an identical
+ set of result / cursor metadata, otherwise the behavior is
+ undefined.
+
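+        E.g., a sketch merging two compatible results::
+
+            merged = result_one.merge(result_two)
+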
+ """
+ return MergedResult(self._metadata, (self,) + others)
+
+
+class FilterResult(ResultInternal):
+ """A wrapper for a :class:`_engine.Result` that returns objects other than
+ :class:`_result.Row` objects, such as dictionaries or scalar objects.
+
+ :class:`.FilterResult` is the common base for additional result
+ APIs including :class:`.MappingResult`, :class:`.ScalarResult`
+ and :class:`.AsyncResult`.
+
+ """
+
+ _post_creational_filter = None
+
+ @_generative
+ def yield_per(self, num):
+ """Configure the row-fetching strategy to fetch ``num`` rows at a time.
+
+ The :meth:`_engine.FilterResult.yield_per` method is a pass through
+ to the :meth:`_engine.Result.yield_per` method. See that method's
+ documentation for usage notes.
+
+ .. versionadded:: 1.4.40 - added :meth:`_engine.FilterResult.yield_per`
+ so that the method is available on all result set implementations
+
+ .. seealso::
+
+ :ref:`engine_stream_results` - describes Core behavior for
+ :meth:`_engine.Result.yield_per`
+
+ :ref:`orm_queryguide_yield_per` - in the :ref:`queryguide_toplevel`
+
+ """
+ self._real_result = self._real_result.yield_per(num)
+
+ def _soft_close(self, hard=False):
+ self._real_result._soft_close(hard=hard)
+
+ @property
+ def _attributes(self):
+ return self._real_result._attributes
+
+ def _fetchiter_impl(self):
+ return self._real_result._fetchiter_impl()
+
+ def _fetchone_impl(self, hard_close=False):
+ return self._real_result._fetchone_impl(hard_close=hard_close)
+
+ def _fetchall_impl(self):
+ return self._real_result._fetchall_impl()
+
+ def _fetchmany_impl(self, size=None):
+ return self._real_result._fetchmany_impl(size=size)
+
+
+class ScalarResult(FilterResult):
+ """A wrapper for a :class:`_result.Result` that returns scalar values
+ rather than :class:`_row.Row` values.
+
+ The :class:`_result.ScalarResult` object is acquired by calling the
+ :meth:`_result.Result.scalars` method.
+
+ A special limitation of :class:`_result.ScalarResult` is that it has
+ no ``fetchone()`` method; since the semantics of ``fetchone()`` are that
+ the ``None`` value indicates no more results, this is not compatible
+ with :class:`_result.ScalarResult` since there is no way to distinguish
+ between ``None`` as a row value versus ``None`` as an indicator. Use
+ ``next(result)`` to receive values individually.
+
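+ E.g., a short sketch, assuming a hypothetical ``result`` object::
+
+     scalars = result.scalars()
+     try:
+         value = next(scalars)
+     except StopIteration:
+         value = None  # no more rows; disambiguate as needed
+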
+ """
+
+ _generate_rows = False
+
+ def __init__(self, real_result, index):
+ self._real_result = real_result
+
+ if real_result._source_supports_scalars:
+ self._metadata = real_result._metadata
+ self._post_creational_filter = None
+ else:
+ self._metadata = real_result._metadata._reduce([index])
+ self._post_creational_filter = operator.itemgetter(0)
+
+ self._unique_filter_state = real_result._unique_filter_state
+
+ def unique(self, strategy=None):
+ """Apply unique filtering to the objects returned by this
+ :class:`_engine.ScalarResult`.
+
+ See :meth:`_engine.Result.unique` for usage details.
+
+ """
+ self._unique_filter_state = (set(), strategy)
+ return self
+
+ def partitions(self, size=None):
+ """Iterate through sub-lists of elements of the size given.
+
+ Equivalent to :meth:`_result.Result.partitions` except that
+ scalar values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+
+ getter = self._manyrow_getter
+
+ while True:
+ partition = getter(self, size)
+ if partition:
+ yield partition
+ else:
+ break
+
+ def fetchall(self):
+ """A synonym for the :meth:`_engine.ScalarResult.all` method."""
+
+ return self._allrows()
+
+ def fetchmany(self, size=None):
+ """Fetch many objects.
+
+ Equivalent to :meth:`_result.Result.fetchmany` except that
+ scalar values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._manyrow_getter(self, size)
+
+ def all(self):
+ """Return all scalar values in a list.
+
+ Equivalent to :meth:`_result.Result.all` except that
+ scalar values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._allrows()
+
+ def __iter__(self):
+ return self._iter_impl()
+
+ def __next__(self):
+ return self._next_impl()
+
+ if py2k:
+
+ def next(self): # noqa
+ return self._next_impl()
+
+ def first(self):
+ """Fetch the first object or None if no object is present.
+
+ Equivalent to :meth:`_result.Result.first` except that
+ scalar values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=False, raise_for_none=False, scalar=False
+ )
+
+ def one_or_none(self):
+ """Return at most one object or raise an exception.
+
+ Equivalent to :meth:`_result.Result.one_or_none` except that
+ scalar values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=False, scalar=False
+ )
+
+ def one(self):
+ """Return exactly one object or raise an exception.
+
+ Equivalent to :meth:`_result.Result.one` except that
+ scalar values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=True, scalar=False
+ )
+
+
+class MappingResult(_WithKeys, FilterResult):
+ """A wrapper for a :class:`_engine.Result` that returns dictionary values
+ rather than :class:`_engine.Row` values.
+
+ The :class:`_engine.MappingResult` object is acquired by calling the
+ :meth:`_engine.Result.mappings` method.
+
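+ E.g., a brief sketch, assuming a hypothetical ``result`` returned by
+ :meth:`_engine.Connection.execute` with columns ``id`` and ``name``::
+
+     for mapping in result.mappings():
+         print(mapping["id"], mapping["name"])
+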
+ """
+
+ _generate_rows = True
+
+ _post_creational_filter = operator.attrgetter("_mapping")
+
+ def __init__(self, result):
+ self._real_result = result
+ self._unique_filter_state = result._unique_filter_state
+ self._metadata = result._metadata
+ if result._source_supports_scalars:
+ self._metadata = self._metadata._reduce([0])
+
+ def unique(self, strategy=None):
+ """Apply unique filtering to the objects returned by this
+ :class:`_engine.MappingResult`.
+
+ See :meth:`_engine.Result.unique` for usage details.
+
+ """
+ self._unique_filter_state = (set(), strategy)
+ return self
+
+ def columns(self, *col_expressions):
+ r"""Establish the columns that should be returned in each row."""
+ return self._column_slices(col_expressions)
+
+ def partitions(self, size=None):
+ """Iterate through sub-lists of elements of the size given.
+
+ Equivalent to :meth:`_result.Result.partitions` except that
+ mapping values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+
+ getter = self._manyrow_getter
+
+ while True:
+ partition = getter(self, size)
+ if partition:
+ yield partition
+ else:
+ break
+
+ def fetchall(self):
+ """A synonym for the :meth:`_engine.MappingResult.all` method."""
+
+ return self._allrows()
+
+ def fetchone(self):
+ """Fetch one object.
+
+ Equivalent to :meth:`_result.Result.fetchone` except that
+ mapping values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+
+ row = self._onerow_getter(self)
+ if row is _NO_ROW:
+ return None
+ else:
+ return row
+
+ def fetchmany(self, size=None):
+ """Fetch many objects.
+
+ Equivalent to :meth:`_result.Result.fetchmany` except that
+ mapping values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+
+ return self._manyrow_getter(self, size)
+
+ def all(self):
+ """Return all scalar values in a list.
+
+ Equivalent to :meth:`_result.Result.all` except that
+ mapping values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+
+ return self._allrows()
+
+ def __iter__(self):
+ return self._iter_impl()
+
+ def __next__(self):
+ return self._next_impl()
+
+ if py2k:
+
+ def next(self): # noqa
+ return self._next_impl()
+
+ def first(self):
+ """Fetch the first object or None if no object is present.
+
+ Equivalent to :meth:`_result.Result.first` except that
+ mapping values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=False, raise_for_none=False, scalar=False
+ )
+
+ def one_or_none(self):
+ """Return at most one object or raise an exception.
+
+ Equivalent to :meth:`_result.Result.one_or_none` except that
+ mapping values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=False, scalar=False
+ )
+
+ def one(self):
+ """Return exactly one object or raise an exception.
+
+ Equivalent to :meth:`_result.Result.one` except that
+ mapping values, rather than :class:`_result.Row` objects,
+ are returned.
+
+ """
+ return self._only_one_row(
+ raise_for_second_row=True, raise_for_none=True, scalar=False
+ )
+
+
+class FrozenResult(object):
+ """Represents a :class:`.Result` object in a "frozen" state suitable
+ for caching.
+
+ The :class:`_engine.FrozenResult` object is returned from the
+ :meth:`_engine.Result.freeze` method of any :class:`_engine.Result`
+ object.
+
+ A new iterable :class:`.Result` object is generated from a fixed
+ set of data each time the :class:`.FrozenResult` is invoked as
+ a callable::
+
+
+ result = connection.execute(query)
+
+ frozen = result.freeze()
+
+ unfrozen_result_one = frozen()
+
+ for row in unfrozen_result_one:
+ print(row)
+
+ unfrozen_result_two = frozen()
+ rows = unfrozen_result_two.all()
+
+ # ... etc
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :ref:`do_orm_execute_re_executing` - example usage within the
+ ORM to implement a result-set cache.
+
+ :func:`_orm.loading.merge_frozen_result` - ORM function to merge
+ a frozen result back into a :class:`_orm.Session`.
+
+ """
+
+ def __init__(self, result):
+ self.metadata = result._metadata._for_freeze()
+ self._source_supports_scalars = result._source_supports_scalars
+ self._attributes = result._attributes
+
+ if self._source_supports_scalars:
+ self.data = list(result._raw_row_iterator())
+ else:
+ self.data = result.fetchall()
+
+ def rewrite_rows(self):
+ if self._source_supports_scalars:
+ return [[elem] for elem in self.data]
+ else:
+ return [list(row) for row in self.data]
+
+ def with_new_rows(self, tuple_data):
+ fr = FrozenResult.__new__(FrozenResult)
+ fr.metadata = self.metadata
+ fr._attributes = self._attributes
+ fr._source_supports_scalars = self._source_supports_scalars
+
+ if self._source_supports_scalars:
+ fr.data = [d[0] for d in tuple_data]
+ else:
+ fr.data = tuple_data
+ return fr
+
+ def __call__(self):
+ result = IteratorResult(self.metadata, iter(self.data))
+ result._attributes = self._attributes
+ result._source_supports_scalars = self._source_supports_scalars
+ return result
+
+
+class IteratorResult(Result):
+ """A :class:`.Result` that gets data from a Python iterator of
+ :class:`.Row` objects.
+
+ .. versionadded:: 1.4
+
+ """
+
+ _hard_closed = False
+
+ def __init__(
+ self,
+ cursor_metadata,
+ iterator,
+ raw=None,
+ _source_supports_scalars=False,
+ ):
+ self._metadata = cursor_metadata
+ self.iterator = iterator
+ self.raw = raw
+ self._source_supports_scalars = _source_supports_scalars
+
+ def _soft_close(self, hard=False, **kw):
+ if hard:
+ self._hard_closed = True
+ if self.raw is not None:
+ self.raw._soft_close(hard=hard, **kw)
+ self.iterator = iter([])
+ self._reset_memoizations()
+
+ def _raise_hard_closed(self):
+ raise exc.ResourceClosedError("This result object is closed.")
+
+ def _raw_row_iterator(self):
+ return self.iterator
+
+ def _fetchiter_impl(self):
+ if self._hard_closed:
+ self._raise_hard_closed()
+ return self.iterator
+
+ def _fetchone_impl(self, hard_close=False):
+ if self._hard_closed:
+ self._raise_hard_closed()
+
+ row = next(self.iterator, _NO_ROW)
+ if row is _NO_ROW:
+ self._soft_close(hard=hard_close)
+ return None
+ else:
+ return row
+
+ def _fetchall_impl(self):
+ if self._hard_closed:
+ self._raise_hard_closed()
+
+ try:
+ return list(self.iterator)
+ finally:
+ self._soft_close()
+
+ def _fetchmany_impl(self, size=None):
+ if self._hard_closed:
+ self._raise_hard_closed()
+
+ return list(itertools.islice(self.iterator, 0, size))
+
+
+def null_result():
+ return IteratorResult(SimpleResultMetaData([]), iter([]))
+
+
+class ChunkedIteratorResult(IteratorResult):
+ """An :class:`.IteratorResult` that works from an iterator-producing
+ callable.
+
+ The given ``chunks`` argument is a function that is given a number of rows
+ to return in each chunk, or ``None`` for all rows. The function should
+ then return an un-consumed iterator of lists, each list of the requested
+ size.
+
+ The function may be called again at any time, in which case it should
+ continue from the same result set while adjusting the chunk size as
+ given.
+
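+ A minimal sketch of such a ``chunks`` callable, assuming a
+ hypothetical pre-fetched list of ``rows``::
+
+     import itertools
+
+     def make_chunks(rows):
+         it = iter(rows)
+
+         def chunks(size):
+             # continues from the shared iterator on each call
+             while True:
+                 chunk = list(itertools.islice(it, size or None))
+                 if not chunk:
+                     break
+                 yield chunk
+
+         return chunks
+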
+ .. versionadded:: 1.4
+
+ """
+
+ def __init__(
+ self,
+ cursor_metadata,
+ chunks,
+ source_supports_scalars=False,
+ raw=None,
+ dynamic_yield_per=False,
+ ):
+ self._metadata = cursor_metadata
+ self.chunks = chunks
+ self._source_supports_scalars = source_supports_scalars
+ self.raw = raw
+ self.iterator = itertools.chain.from_iterable(self.chunks(None))
+ self.dynamic_yield_per = dynamic_yield_per
+
+ @_generative
+ def yield_per(self, num):
+ # TODO: this throws away the iterator which may be holding
+ # onto a chunk. the yield_per cannot be changed once any
+ # rows have been fetched. either find a way to enforce this,
+ # or we can't use itertools.chain and will instead have to
+ # keep track.
+
+ self._yield_per = num
+ self.iterator = itertools.chain.from_iterable(self.chunks(num))
+
+ def _soft_close(self, **kw):
+ super(ChunkedIteratorResult, self)._soft_close(**kw)
+ self.chunks = lambda size: []
+
+ def _fetchmany_impl(self, size=None):
+ if self.dynamic_yield_per:
+ self.iterator = itertools.chain.from_iterable(self.chunks(size))
+ return super(ChunkedIteratorResult, self)._fetchmany_impl(size=size)
+
+
+class MergedResult(IteratorResult):
+ """A :class:`_engine.Result` that is merged from any number of
+ :class:`_engine.Result` objects.
+
+ Returned by the :meth:`_engine.Result.merge` method.
+
+ .. versionadded:: 1.4
+
+ """
+
+ closed = False
+
+ def __init__(self, cursor_metadata, results):
+ self._results = results
+ super(MergedResult, self).__init__(
+ cursor_metadata,
+ itertools.chain.from_iterable(
+ r._raw_row_iterator() for r in results
+ ),
+ )
+
+ self._unique_filter_state = results[0]._unique_filter_state
+ self._yield_per = results[0]._yield_per
+
+ # going to try something w/ this in next rev
+ self._source_supports_scalars = results[0]._source_supports_scalars
+
+ self._attributes = self._attributes.merge_with(
+ *[r._attributes for r in results]
+ )
+
+ def _soft_close(self, hard=False, **kw):
+ for r in self._results:
+ r._soft_close(hard=hard, **kw)
+ if hard:
+ self.closed = True
diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py
new file mode 100644
index 0000000..e80e8c6
--- /dev/null
+++ b/lib/sqlalchemy/engine/row.py
@@ -0,0 +1,621 @@
+# engine/row.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Define row constructs including :class:`.Row`."""
+
+
+import operator
+
+from .. import util
+from ..sql import util as sql_util
+from ..util.compat import collections_abc
+
+MD_INDEX = 0 # integer index in cursor.description
+
+# This reconstructor is necessary so that pickles with the C extension or
+# without use the same binary format.
+try:
+ # We need a different reconstructor on the C extension so that we can
+ # add extra checks that fields have correctly been initialized by
+ # __setstate__.
+ from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
+
+ # The extra function embedding is needed so that the
+ # reconstructor function has the same signature whether or not
+ # the extension is present.
+ def rowproxy_reconstructor(cls, state):
+ return safe_rowproxy_reconstructor(cls, state)
+
+
+except ImportError:
+
+ def rowproxy_reconstructor(cls, state):
+ obj = cls.__new__(cls)
+ obj.__setstate__(state)
+ return obj
+
+
+KEY_INTEGER_ONLY = 0
+"""__getitem__ only allows integer values, raises TypeError otherwise"""
+
+KEY_OBJECTS_ONLY = 1
+"""__getitem__ only allows string/object values, raises TypeError otherwise"""
+
+KEY_OBJECTS_BUT_WARN = 2
+"""__getitem__ allows integer or string/object values, but emits a 2.0
+deprecation warning if string/object is passed"""
+
+KEY_OBJECTS_NO_WARN = 3
+"""__getitem__ allows integer or string/object values with no warnings
+or errors."""
+
+try:
+ from sqlalchemy.cresultproxy import BaseRow
+
+ _baserow_usecext = True
+except ImportError:
+ _baserow_usecext = False
+
+ class BaseRow(object):
+ __slots__ = ("_parent", "_data", "_keymap", "_key_style")
+
+ def __init__(self, parent, processors, keymap, key_style, data):
+ """Row objects are constructed by CursorResult objects."""
+
+ object.__setattr__(self, "_parent", parent)
+
+ if processors:
+ object.__setattr__(
+ self,
+ "_data",
+ tuple(
+ [
+ proc(value) if proc else value
+ for proc, value in zip(processors, data)
+ ]
+ ),
+ )
+ else:
+ object.__setattr__(self, "_data", tuple(data))
+
+ object.__setattr__(self, "_keymap", keymap)
+
+ object.__setattr__(self, "_key_style", key_style)
+
+ def __reduce__(self):
+ return (
+ rowproxy_reconstructor,
+ (self.__class__, self.__getstate__()),
+ )
+
+ def _filter_on_values(self, filters):
+ return Row(
+ self._parent,
+ filters,
+ self._keymap,
+ self._key_style,
+ self._data,
+ )
+
+ def _values_impl(self):
+ return list(self)
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __len__(self):
+ return len(self._data)
+
+ def __hash__(self):
+ return hash(self._data)
+
+ def _get_by_int_impl(self, key):
+ return self._data[key]
+
+ def _get_by_key_impl(self, key):
+ if int in key.__class__.__mro__:
+ return self._data[key]
+
+ if self._key_style == KEY_INTEGER_ONLY:
+ self._parent._raise_for_nonint(key)
+
+ # the following is all LegacyRow support. none of this
+ # should be called if not LegacyRow
+ # assert isinstance(self, LegacyRow)
+
+ try:
+ rec = self._keymap[key]
+ except KeyError as ke:
+ rec = self._parent._key_fallback(key, ke)
+ except TypeError:
+ if isinstance(key, slice):
+ return tuple(self._data[key])
+ else:
+ raise
+
+ mdindex = rec[MD_INDEX]
+ if mdindex is None:
+ self._parent._raise_for_ambiguous_column_name(rec)
+
+ elif self._key_style == KEY_OBJECTS_BUT_WARN and mdindex != key:
+ self._parent._warn_for_nonint(key)
+
+ return self._data[mdindex]
+
+ # The original 1.4 plan was that Row would not allow row["str"]
+ # access, however as the C extensions were inadvertently allowing
+ # this coupled with the fact that orm Session sets future=True,
+ # this allows a softer upgrade path. see #6218
+ __getitem__ = _get_by_key_impl
+
+ def _get_by_key_impl_mapping(self, key):
+ try:
+ rec = self._keymap[key]
+ except KeyError as ke:
+ rec = self._parent._key_fallback(key, ke)
+
+ mdindex = rec[MD_INDEX]
+ if mdindex is None:
+ self._parent._raise_for_ambiguous_column_name(rec)
+ elif (
+ self._key_style == KEY_OBJECTS_ONLY
+ and int in key.__class__.__mro__
+ ):
+ raise KeyError(key)
+
+ return self._data[mdindex]
+
+ def __getattr__(self, name):
+ try:
+ return self._get_by_key_impl_mapping(name)
+ except KeyError as e:
+ util.raise_(AttributeError(e.args[0]), replace_context=e)
+
+
+class Row(BaseRow, collections_abc.Sequence):
+ """Represent a single result row.
+
+ The :class:`.Row` object represents a row of a database result. It is
+ typically associated in the 1.x series of SQLAlchemy with the
+ :class:`_engine.CursorResult` object, however is also used by the ORM for
+ tuple-like results as of SQLAlchemy 1.4.
+
+ The :class:`.Row` object seeks to act as much like a Python named
+ tuple as possible. For mapping (i.e. dictionary) behavior on a row,
+ such as testing for containment of keys, refer to the :attr:`.Row._mapping`
+ attribute.
+
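+ E.g., a brief sketch, assuming a hypothetical result row with a
+ column labeled ``name``::
+
+     row = result.first()
+
+     value = row[0]                # tuple-like positional access
+     value = row.name              # named-attribute access
+     value = row._mapping["name"]  # mapping access
+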
+ .. seealso::
+
+ :ref:`tutorial_selecting_data` - includes examples of selecting
+ rows from SELECT statements.
+
+ :class:`.LegacyRow` - Compatibility interface introduced in SQLAlchemy
+ 1.4.
+
+ .. versionchanged:: 1.4
+
+ Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a
+ "proxy" object in that it contains the final form of data within it,
+ and now acts mostly like a named tuple. Mapping-like functionality is
+ moved to the :attr:`.Row._mapping` attribute, but will remain available
+ in SQLAlchemy 1.x series via the :class:`.LegacyRow` class that is used
+ by :class:`_engine.LegacyCursorResult`.
+ See :ref:`change_4710_core` for background
+ on this change.
+
+ """
+
+ __slots__ = ()
+
+ # in 2.0, this should be KEY_INTEGER_ONLY
+ _default_key_style = KEY_OBJECTS_BUT_WARN
+
+ def __setattr__(self, name, value):
+ raise AttributeError("can't set attribute")
+
+ def __delattr__(self, name):
+ raise AttributeError("can't delete attribute")
+
+ @property
+ def _mapping(self):
+ """Return a :class:`.RowMapping` for this :class:`.Row`.
+
+ This object provides a consistent Python mapping (i.e. dictionary)
+ interface for the data contained within the row. The :class:`.Row`
+ by itself behaves like a named tuple, however in the 1.4 series of
+ SQLAlchemy, the :class:`.LegacyRow` class is still used by Core which
+ continues to have mapping-like behaviors against the row object
+ itself.
+
+ .. seealso::
+
+ :attr:`.Row._fields`
+
+ .. versionadded:: 1.4
+
+ """
+ return RowMapping(
+ self._parent,
+ None,
+ self._keymap,
+ RowMapping._default_key_style,
+ self._data,
+ )
+
+ def _special_name_accessor(name):
+ """Handle ambiguous names such as "count" and "index" """
+
+ @property
+ def go(self):
+ if self._parent._has_key(name):
+ return self.__getattr__(name)
+ else:
+
+ def meth(*arg, **kw):
+ return getattr(collections_abc.Sequence, name)(
+ self, *arg, **kw
+ )
+
+ return meth
+
+ return go
+
+ count = _special_name_accessor("count")
+ index = _special_name_accessor("index")
+
+ def __contains__(self, key):
+ return key in self._data
+
+ def __getstate__(self):
+ return {
+ "_parent": self._parent,
+ "_data": self._data,
+ "_key_style": self._key_style,
+ }
+
+ def __setstate__(self, state):
+ parent = state["_parent"]
+ object.__setattr__(self, "_parent", parent)
+ object.__setattr__(self, "_data", state["_data"])
+ object.__setattr__(self, "_keymap", parent._keymap)
+ object.__setattr__(self, "_key_style", state["_key_style"])
+
+ def _op(self, other, op):
+ return (
+ op(tuple(self), tuple(other))
+ if isinstance(other, Row)
+ else op(tuple(self), other)
+ )
+
+ __hash__ = BaseRow.__hash__
+
+ def __lt__(self, other):
+ return self._op(other, operator.lt)
+
+ def __le__(self, other):
+ return self._op(other, operator.le)
+
+ def __ge__(self, other):
+ return self._op(other, operator.ge)
+
+ def __gt__(self, other):
+ return self._op(other, operator.gt)
+
+ def __eq__(self, other):
+ return self._op(other, operator.eq)
+
+ def __ne__(self, other):
+ return self._op(other, operator.ne)
+
+ def __repr__(self):
+ return repr(sql_util._repr_row(self))
+
+ @util.deprecated_20(
+ ":meth:`.Row.keys`",
+ alternative="Use the namedtuple standard accessor "
+ ":attr:`.Row._fields`, or for full mapping behavior use "
+ "row._mapping.keys() ",
+ )
+ def keys(self):
+ """Return the list of keys as strings represented by this
+ :class:`.Row`.
+
+ The keys can represent the labels of the columns returned by a core
+ statement or the names of the orm classes returned by an orm
+ execution.
+
+ This method is analogous to the Python dictionary ``.keys()`` method,
+ except that it returns a list, not an iterator.
+
+ .. seealso::
+
+ :attr:`.Row._fields`
+
+ :attr:`.Row._mapping`
+
+ """
+ return self._parent.keys
+
+ @property
+ def _fields(self):
+ """Return a tuple of string keys as represented by this
+ :class:`.Row`.
+
+ The keys can represent the labels of the columns returned by a core
+ statement or the names of the orm classes returned by an orm
+ execution.
+
+ This attribute is analogous to the Python named tuple ``._fields``
+ attribute.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`.Row._mapping`
+
+ """
+ return tuple([k for k in self._parent.keys if k is not None])
+
+ def _asdict(self):
+ """Return a new dict which maps field names to their corresponding
+ values.
+
+ This method is analogous to the Python named tuple ``._asdict()``
+ method, and works by applying the ``dict()`` constructor to the
+ :attr:`.Row._mapping` attribute.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`.Row._mapping`
+
+ """
+ return dict(self._mapping)
+
+ def _replace(self):
+ raise NotImplementedError()
+
+ @property
+ def _field_defaults(self):
+ raise NotImplementedError()
+
+
+class LegacyRow(Row):
+ """A subclass of :class:`.Row` that delivers 1.x SQLAlchemy behaviors
+ for Core.
+
+ The :class:`.LegacyRow` class is where most of the Python mapping
+ (i.e. dictionary-like)
+ behaviors are implemented for the row object. The mapping behavior
+ of :class:`.Row` going forward is accessible via the :class:`.Row._mapping`
+ attribute.
+
+ .. versionadded:: 1.4 - added :class:`.LegacyRow` which encapsulates most
+ of the deprecated behaviors of :class:`.Row`.
+
+ """
+
+ __slots__ = ()
+
+ if util.SQLALCHEMY_WARN_20:
+ _default_key_style = KEY_OBJECTS_BUT_WARN
+ else:
+ _default_key_style = KEY_OBJECTS_NO_WARN
+
+ def __contains__(self, key):
+ return self._parent._contains(key, self)
+
+ # prior to #6218, LegacyRow would redirect the behavior of __getitem__
+ # for the non C version of BaseRow. This is now set up by Python BaseRow
+ # in all cases
+ # if not _baserow_usecext:
+ # __getitem__ = BaseRow._get_by_key_impl
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.has_key` method is deprecated and will be "
+ "removed in a future release. To test for key membership, use "
+ "the :attr:`Row._mapping` attribute, i.e. 'key in row._mapping`.",
+ )
+ def has_key(self, key):
+ """Return True if this :class:`.LegacyRow` contains the given key.
+
+ Through the SQLAlchemy 1.x series, the ``__contains__()`` method of
+ :class:`.Row` (or :class:`.LegacyRow` as of SQLAlchemy 1.4) also links
+ to :meth:`.Row.has_key`, in that an expression such as ::
+
+ "some_col" in row
+
+ will return True if the row contains a column named ``"some_col"``,
+ in the way that a Python mapping works.
+
+ However, it is planned that the 2.0 series of SQLAlchemy will reverse
+ this behavior so that ``__contains__()`` will refer to a value being
+ present in the row, in the way that a Python tuple works.
+
+ .. seealso::
+
+ :ref:`change_4710_core`
+
+ """
+
+ return self._parent._has_key(key)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.items` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.items()'.",
+ )
+ def items(self):
+ """Return a list of tuples, each tuple containing a key/value pair.
+
+ This method is analogous to the Python dictionary ``.items()`` method,
+ except that it returns a list, not an iterator.
+
+ """
+
+ return [(key, self[key]) for key in self.keys()]
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.iterkeys` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.keys()'.",
+ )
+ def iterkeys(self):
+ """Return a an iterator against the :meth:`.Row.keys` method.
+
+ This method is analogous to the Python-2-only dictionary
+ ``.iterkeys()`` method.
+
+ """
+ return iter(self._parent.keys)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.itervalues` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.values()'.",
+ )
+ def itervalues(self):
+ """Return a an iterator against the :meth:`.Row.values` method.
+
+ This method is analogous to the Python-2-only dictionary
+ ``.itervalues()`` method.
+
+ """
+ return iter(self)
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`.LegacyRow.values` method is deprecated and will be "
+ "removed in a future release. Use the :attr:`Row._mapping` "
+ "attribute, i.e., 'row._mapping.values()'.",
+ )
+ def values(self):
+ """Return the values represented by this :class:`.Row` as a list.
+
+ This method is analogous to the Python dictionary ``.values()`` method,
+ except that it returns a list, not an iterator.
+
+ """
+
+ return self._values_impl()
+
+
+BaseRowProxy = BaseRow
+RowProxy = Row
+
+
+class ROMappingView(
+ collections_abc.KeysView,
+ collections_abc.ValuesView,
+ collections_abc.ItemsView,
+):
+ __slots__ = (
+ "_mapping",
+ "_items",
+ )
+
+ def __init__(self, mapping, items):
+ self._mapping = mapping
+ self._items = items
+
+ def __len__(self):
+ return len(self._items)
+
+ def __repr__(self):
+ return "{0.__class__.__name__}({0._mapping!r})".format(self)
+
+ def __iter__(self):
+ return iter(self._items)
+
+ def __contains__(self, item):
+ return item in self._items
+
+ def __eq__(self, other):
+ return list(other) == list(self)
+
+ def __ne__(self, other):
+ return list(other) != list(self)
+
+
+class RowMapping(BaseRow, collections_abc.Mapping):
+ """A ``Mapping`` that maps column names and objects to :class:`.Row`
+ values.
+
+ The :class:`.RowMapping` is available from a :class:`.Row` via the
+ :attr:`.Row._mapping` attribute, as well as from the iterable interface
+ provided by the :class:`.MappingResult` object returned by the
+ :meth:`_engine.Result.mappings` method.
+
+ :class:`.RowMapping` supplies Python mapping (i.e. dictionary) access to
+ the contents of the row. This includes support for testing of
+ containment of specific keys (string column names or objects), as well
+ as iteration of keys, values, and items::
+
+ for row in result:
+ if 'a' in row._mapping:
+ print("Column 'a': %s" % row._mapping['a'])
+
+ print("Column b: %s" % row._mapping[table.c.b])
+
+
+ .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the
+ mapping-like access previously provided by a database result row,
+ which now seeks to behave mostly like a named tuple.
+
+ """
+
+ __slots__ = ()
+
+ _default_key_style = KEY_OBJECTS_ONLY
+
+ if not _baserow_usecext:
+
+ __getitem__ = BaseRow._get_by_key_impl_mapping
+
+ def _values_impl(self):
+ return list(self._data)
+
+ def __iter__(self):
+ return (k for k in self._parent.keys if k is not None)
+
+ def __len__(self):
+ return len(self._data)
+
+ def __contains__(self, key):
+ return self._parent._has_key(key)
+
+ def __repr__(self):
+ return repr(dict(self))
+
+ def items(self):
+ """Return a view of key/value tuples for the elements in the
+ underlying :class:`.Row`.
+
+ """
+ return ROMappingView(self, [(key, self[key]) for key in self.keys()])
+
+ def keys(self):
+ """Return a view of 'keys' for string column names represented
+ by the underlying :class:`.Row`.
+
+ """
+
+ return self._parent.keys
+
+ def values(self):
+ """Return a view of values for the values represented in the
+ underlying :class:`.Row`.
+
+ """
+ return ROMappingView(self, self._values_impl())
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
new file mode 100644
index 0000000..54a5e51
--- /dev/null
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -0,0 +1,17 @@
+# engine/strategies.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Deprecated mock engine strategy used by Alembic.
+
+
+"""
+
+from .mock import MockConnection # noqa
+
+
+class MockEngineStrategy(object):
+ MockConnection = MockConnection
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
new file mode 100644
index 0000000..db971c2
--- /dev/null
+++ b/lib/sqlalchemy/engine/url.py
@@ -0,0 +1,806 @@
+# engine/url.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
+information about a database connection specification.
+
+The URL object is created automatically when
+:func:`~sqlalchemy.engine.create_engine` is called with a string
+argument; alternatively, the URL is a public-facing construct which can
+be used directly and is also accepted directly by ``create_engine()``.
+"""
+
+import re
+
+from .interfaces import Dialect
+from .. import exc
+from .. import util
+from ..dialects import plugins
+from ..dialects import registry
+from ..util import collections_abc
+from ..util import compat
+
+
+class URL(
+ util.namedtuple(
+ "URL",
+ [
+ "drivername",
+ "username",
+ "password",
+ "host",
+ "port",
+ "database",
+ "query",
+ ],
+ )
+):
+ """
+ Represent the components of a URL used to connect to a database.
+
+ This object is suitable to be passed directly to a
+ :func:`_sa.create_engine` call. The fields of the URL are parsed
+ from a string by the :func:`.make_url` function. The string
+ format of the URL is an RFC-1738-style string.
+
+ To create a new :class:`_engine.URL` object, use the
+ :func:`_engine.url.make_url` function. To construct a :class:`_engine.URL`
+ programmatically, use the :meth:`_engine.URL.create` constructor.
+
+ .. versionchanged:: 1.4
+
+ The :class:`_engine.URL` object is now an immutable object. To
+ create a URL, use the :func:`_engine.make_url` or
+ :meth:`_engine.URL.create` function / method. To modify
+ a :class:`_engine.URL`, use methods like
+ :meth:`_engine.URL.set` and
+ :meth:`_engine.URL.update_query_dict` to return a new
+ :class:`_engine.URL` object with modifications. See notes for this
+ change at :ref:`change_5526`.
+
+ :class:`_engine.URL` contains the following attributes:
+
+ * :attr:`_engine.URL.drivername`: database backend and driver name, such as
+ ``postgresql+psycopg2``
+ * :attr:`_engine.URL.username`: username string
+ * :attr:`_engine.URL.password`: password string
+ * :attr:`_engine.URL.host`: string hostname
+ * :attr:`_engine.URL.port`: integer port number
+ * :attr:`_engine.URL.database`: string database name
+ * :attr:`_engine.URL.query`: an immutable mapping representing the query
+ string. contains strings for keys and either strings or tuples of
+ strings for values.
+
+
+ """
+
+ def __new__(self, *arg, **kw):
+ if kw.pop("_new_ok", False):
+ return super(URL, self).__new__(self, *arg, **kw)
+ else:
+ util.warn_deprecated(
+ "Calling URL() directly is deprecated and will be disabled "
+ "in a future release. The public constructor for URL is "
+ "now the URL.create() method.",
+ "1.4",
+ )
+ return URL.create(*arg, **kw)
+
+ @classmethod
+ def create(
+ cls,
+ drivername,
+ username=None,
+ password=None,
+ host=None,
+ port=None,
+ database=None,
+ query=util.EMPTY_DICT,
+ ):
+ """Create a new :class:`_engine.URL` object.
+
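+ E.g., a brief sketch::
+
+     from sqlalchemy.engine import URL
+
+     url = URL.create(
+         drivername="postgresql+psycopg2",
+         username="user",
+         password="pass",
+         host="localhost",
+         database="mydb",
+     )
+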
+ :param drivername: the name of the database backend. This name will
+ correspond to a module in sqlalchemy/databases or a third party
+ plug-in.
+ :param username: The user name.
+ :param password: database password. Is typically a string, but may
+ also be an object that can be stringified with ``str()``.
+
+ .. note:: A password-producing object will be stringified only
+ **once** per :class:`_engine.Engine` object. For dynamic password
+ generation per connect, see :ref:`engines_dynamic_tokens`.
+
+ :param host: The name of the host.
+ :param port: The port number.
+ :param database: The database name.
+ :param query: A dictionary of string keys to string values to be passed
+ to the dialect and/or the DBAPI upon connect. To specify non-string
+ parameters to a Python DBAPI directly, use the
+ :paramref:`_sa.create_engine.connect_args` parameter to
+ :func:`_sa.create_engine`. See also
+ :attr:`_engine.URL.normalized_query` for a dictionary that is
+ consistently string->list of string.
+ :return: new :class:`_engine.URL` object.
+
+ .. versionadded:: 1.4
+
+ The :class:`_engine.URL` object is now an **immutable named
+ tuple**. In addition, the ``query`` dictionary is also immutable.
+ To create a URL, use the :func:`_engine.url.make_url` or
+ :meth:`_engine.URL.create` function / method. To modify a
+ :class:`_engine.URL`, use the :meth:`_engine.URL.set` and
+ :meth:`_engine.URL.update_query` methods.
+
+ """
+
+ return cls(
+ cls._assert_str(drivername, "drivername"),
+ cls._assert_none_str(username, "username"),
+ password,
+ cls._assert_none_str(host, "host"),
+ cls._assert_port(port),
+ cls._assert_none_str(database, "database"),
+ cls._str_dict(query),
+ _new_ok=True,
+ )
+
+ @classmethod
+ def _assert_port(cls, port):
+ if port is None:
+ return None
+ try:
+ return int(port)
+ except TypeError:
+ raise TypeError("Port argument must be an integer or None")
+
+ @classmethod
+ def _assert_str(cls, v, paramname):
+ if not isinstance(v, compat.string_types):
+ raise TypeError("%s must be a string" % paramname)
+ return v
+
+ @classmethod
+ def _assert_none_str(cls, v, paramname):
+ if v is None:
+ return v
+
+ return cls._assert_str(v, paramname)
+
+ @classmethod
+ def _str_dict(cls, dict_):
+ if dict_ is None:
+ return util.EMPTY_DICT
+
+ def _assert_value(val):
+ if isinstance(val, compat.string_types):
+ return val
+ elif isinstance(val, collections_abc.Sequence):
+ return tuple(_assert_value(elem) for elem in val)
+ else:
+ raise TypeError(
+ "Query dictionary values must be strings or "
+ "sequences of strings"
+ )
+
+ def _assert_str(v):
+ if not isinstance(v, compat.string_types):
+ raise TypeError("Query dictionary keys must be strings")
+ return v
+
+ if isinstance(dict_, collections_abc.Sequence):
+ dict_items = dict_
+ else:
+ dict_items = dict_.items()
+
+ return util.immutabledict(
+ {
+ _assert_str(key): _assert_value(
+ value,
+ )
+ for key, value in dict_items
+ }
+ )
+
+ def set(
+ self,
+ drivername=None,
+ username=None,
+ password=None,
+ host=None,
+ port=None,
+ database=None,
+ query=None,
+ ):
+ """return a new :class:`_engine.URL` object with modifications.
+
+ Values are used if they are non-None. To set a value to ``None``
+ explicitly, use the :meth:`_engine.URL._replace` method adapted
+ from ``namedtuple``.
+
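+ E.g., a short sketch::
+
+     >>> from sqlalchemy.engine import make_url
+     >>> url = make_url("postgresql://user:pass@host/dbname")
+     >>> str(url.set(host="newhost", port=5433))
+     'postgresql://user:pass@newhost:5433/dbname'
+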
+ :param drivername: new drivername
+ :param username: new username
+ :param password: new password
+ :param host: new hostname
+ :param port: new port
+ :param query: new query parameters, passed as a dict of string keys
+ referring to string or sequence-of-string values. Fully
+ replaces the previous query parameters.
+
+ :return: new :class:`_engine.URL` object.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :meth:`_engine.URL.update_query_dict`
+
+ """
+
+ kw = {}
+ if drivername is not None:
+ kw["drivername"] = drivername
+ if username is not None:
+ kw["username"] = username
+ if password is not None:
+ kw["password"] = password
+ if host is not None:
+ kw["host"] = host
+ if port is not None:
+ kw["port"] = port
+ if database is not None:
+ kw["database"] = database
+ if query is not None:
+ kw["query"] = query
+
+ return self._replace(**kw)
+
+ def _replace(self, **kw):
+ """Override ``namedtuple._replace()`` to provide argument checking."""
+
+ if "drivername" in kw:
+ self._assert_str(kw["drivername"], "drivername")
+ for name in "username", "host", "database":
+ if name in kw:
+ self._assert_none_str(kw[name], name)
+ if "port" in kw:
+ self._assert_port(kw["port"])
+ if "query" in kw:
+ kw["query"] = self._str_dict(kw["query"])
+
+ return super(URL, self)._replace(**kw)
+
+ def update_query_string(self, query_string, append=False):
+ """Return a new :class:`_engine.URL` object with the :attr:`_engine.URL.query`
+ parameter dictionary updated by the given query string.
+
+ E.g.::
+
+ >>> from sqlalchemy.engine import make_url
+ >>> url = make_url("postgresql://user:pass@host/dbname")
+ >>> url = url.update_query_string("alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt")
+ >>> str(url)
+ 'postgresql://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
+
+ :param query_string: a URL escaped query string, not including the
+ question mark.
+
+ :param append: if True, parameters in the existing query string will
+ not be removed; new parameters will be in addition to those present.
+ If left at its default of False, keys present in the given query
+ parameters will replace those of the existing query string.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`_engine.URL.query`
+
+ :meth:`_engine.URL.update_query_dict`
+
+ """ # noqa: E501
+ return self.update_query_pairs(
+ util.parse_qsl(query_string), append=append
+ )
+
+ def update_query_pairs(self, key_value_pairs, append=False):
+ """Return a new :class:`_engine.URL` object with the
+ :attr:`_engine.URL.query`
+ parameter dictionary updated by the given sequence of key/value pairs
+
+ E.g.::
+
+ >>> from sqlalchemy.engine import make_url
+ >>> url = make_url("postgresql://user:pass@host/dbname")
+ >>> url = url.update_query_pairs([("alt_host", "host1"), ("alt_host", "host2"), ("ssl_cipher", "/path/to/crt")])
+ >>> str(url)
+ 'postgresql://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
+
+ :param key_value_pairs: A sequence of tuples containing two strings
+ each.
+
+ :param append: if True, parameters in the existing query string will
+ not be removed; new parameters will be in addition to those present.
+ If left at its default of False, keys present in the given query
+ parameters will replace those of the existing query string.
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`_engine.URL.query`
+
+ :meth:`_engine.URL.difference_update_query`
+
+ :meth:`_engine.URL.set`
+
+ """ # noqa: E501
+
+ existing_query = self.query
+ new_keys = {}
+
+ for key, value in key_value_pairs:
+ if key in new_keys:
+ new_keys[key] = util.to_list(new_keys[key])
+ new_keys[key].append(value)
+ else:
+ new_keys[key] = value
+
+ if append:
+ new_query = {}
+
+ for k in new_keys:
+ if k in existing_query:
+ new_query[k] = util.to_list(
+ existing_query[k]
+ ) + util.to_list(new_keys[k])
+ else:
+ new_query[k] = new_keys[k]
+
+ new_query.update(
+ {
+ k: existing_query[k]
+ for k in set(existing_query).difference(new_keys)
+ }
+ )
+ else:
+ new_query = self.query.union(new_keys)
+ return self.set(query=new_query)
+
+ def update_query_dict(self, query_parameters, append=False):
+ """Return a new :class:`_engine.URL` object with the
+ :attr:`_engine.URL.query` parameter dictionary updated by the given
+ dictionary.
+
+ The dictionary typically contains string keys and string values.
+ In order to represent a query parameter that is expressed multiple
+ times, pass a sequence of string values.
+
+ E.g.::
+
+
+ >>> from sqlalchemy.engine import make_url
+ >>> url = make_url("postgresql://user:pass@host/dbname")
+ >>> url = url.update_query_dict({"alt_host": ["host1", "host2"], "ssl_cipher": "/path/to/crt"})
+ >>> str(url)
+ 'postgresql://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
+
+
+ :param query_parameters: A dictionary with string keys and values
+ that are either strings, or sequences of strings.
+
+ :param append: if True, parameters in the existing query string will
+ not be removed; new parameters will be in addition to those present.
+ If left at its default of False, keys present in the given query
+ parameters will replace those of the existing query string.
+
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`_engine.URL.query`
+
+ :meth:`_engine.URL.update_query_string`
+
+ :meth:`_engine.URL.update_query_pairs`
+
+ :meth:`_engine.URL.difference_update_query`
+
+ :meth:`_engine.URL.set`
+
+ """ # noqa: E501
+ return self.update_query_pairs(query_parameters.items(), append=append)
+
+ def difference_update_query(self, names):
+ """
+ Remove the given names from the :attr:`_engine.URL.query` dictionary,
+ returning the new :class:`_engine.URL`.
+
+ E.g.::
+
+ url = url.difference_update_query(['foo', 'bar'])
+
+ Equivalent to using :meth:`_engine.URL.set` as follows::
+
+ url = url.set(
+ query={
+ key: url.query[key]
+ for key in set(url.query).difference(['foo', 'bar'])
+ }
+ )
+
+ .. versionadded:: 1.4
+
+ .. seealso::
+
+ :attr:`_engine.URL.query`
+
+ :meth:`_engine.URL.update_query_dict`
+
+ :meth:`_engine.URL.set`
+
+ """
+
+ if not set(names).intersection(self.query):
+ return self
+
+ return URL(
+ self.drivername,
+ self.username,
+ self.password,
+ self.host,
+ self.port,
+ self.database,
+ util.immutabledict(
+ {
+ key: self.query[key]
+ for key in set(self.query).difference(names)
+ }
+ ),
+ _new_ok=True,
+ )
+
+ @util.memoized_property
+ def normalized_query(self):
+ """Return the :attr:`_engine.URL.query` dictionary with values normalized
+ into sequences.
+
+ As the :attr:`_engine.URL.query` dictionary may contain either
+ string values or sequences of string values to differentiate between
+ parameters that are specified multiple times in the query string,
+ code that needs to handle multiple parameters generically will wish
+ to use this attribute so that all parameters present are presented
+ as sequences. Inspiration is from Python's ``urllib.parse.parse_qs``
+ function. E.g.::
+
+
+ >>> from sqlalchemy.engine import make_url
+ >>> url = make_url("postgresql://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt")
+ >>> url.query
+ immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'})
+ >>> url.normalized_query
+ immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': ('/path/to/crt',)})
+
+ """ # noqa: E501
+
+ return util.immutabledict(
+ {
+ k: (v,) if not isinstance(v, tuple) else v
+ for k, v in self.query.items()
+ }
+ )
+
+ @util.deprecated(
+ "1.4",
+ "The :meth:`_engine.URL.__to_string__ method is deprecated and will "
+ "be removed in a future release. Please use the "
+ ":meth:`_engine.URL.render_as_string` method.",
+ )
+ def __to_string__(self, hide_password=True):
+ """Render this :class:`_engine.URL` object as a string.
+
+ :param hide_password: Defaults to True. The password is not shown
+ in the string unless this is set to False.
+
+ """
+ return self.render_as_string(hide_password=hide_password)
+
+ def render_as_string(self, hide_password=True):
+ """Render this :class:`_engine.URL` object as a string.
+
+ This method is used when the ``__str__()`` or ``__repr__()``
+ methods are used. The method may also be invoked directly in order
+ to pass additional options, such as ``hide_password=False``.
+
+ :param hide_password: Defaults to True. The password is not shown
+ in the string unless this is set to False.
+
+ """
+ s = self.drivername + "://"
+ if self.username is not None:
+ s += _rfc_1738_quote(self.username)
+ if self.password is not None:
+ s += ":" + (
+ "***"
+ if hide_password
+ else _rfc_1738_quote(str(self.password))
+ )
+ s += "@"
+ if self.host is not None:
+ if ":" in self.host:
+ s += "[%s]" % self.host
+ else:
+ s += self.host
+ if self.port is not None:
+ s += ":" + str(self.port)
+ if self.database is not None:
+ s += "/" + self.database
+ if self.query:
+ keys = list(self.query)
+ keys.sort()
+ s += "?" + "&".join(
+ "%s=%s" % (util.quote_plus(k), util.quote_plus(element))
+ for k in keys
+ for element in util.to_list(self.query[k])
+ )
+ return s
+
+ def __str__(self):
+ return self.render_as_string(hide_password=False)
+
+ def __repr__(self):
+ return self.render_as_string()
+
+ def __copy__(self):
+ return self.__class__.create(
+ self.drivername,
+ self.username,
+ self.password,
+ self.host,
+ self.port,
+ self.database,
+ # note this is an immutabledict of str-> str / tuple of str,
+ # also fully immutable. does not require deepcopy
+ self.query,
+ )
+
+ def __deepcopy__(self, memo):
+ return self.__copy__()
+
+ def __hash__(self):
+ return hash(str(self))
+
+ def __eq__(self, other):
+ return (
+ isinstance(other, URL)
+ and self.drivername == other.drivername
+ and self.username == other.username
+ and self.password == other.password
+ and self.host == other.host
+ and self.database == other.database
+ and self.query == other.query
+ and self.port == other.port
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def get_backend_name(self):
+ """Return the backend name.
+
+ This is the name that corresponds to the database backend in
+ use, and is the portion of the :attr:`_engine.URL.drivername`
+ that is to the left of the plus sign.
+
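+ E.g.::
+
+     >>> from sqlalchemy.engine import make_url
+     >>> make_url("postgresql+psycopg2://u:p@h/db").get_backend_name()
+     'postgresql'
+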
+ """
+ if "+" not in self.drivername:
+ return self.drivername
+ else:
+ return self.drivername.split("+")[0]
+
+ def get_driver_name(self):
+ """Return the backend name.
+
+ This is the name that corresponds to the DBAPI driver in
+ use, and is the portion of the :attr:`_engine.URL.drivername`
+ that is to the right of the plus sign.
+
+ If the :attr:`_engine.URL.drivername` does not include a plus sign,
+ then the default :class:`_engine.Dialect` for this :class:`_engine.URL`
+ is imported in order to get the driver name.
+
+ """
+
+ if "+" not in self.drivername:
+ return self.get_dialect().driver
+ else:
+ return self.drivername.split("+")[1]
+
+ def _instantiate_plugins(self, kwargs):
+ plugin_names = util.to_list(self.query.get("plugin", ()))
+ plugin_names += kwargs.get("plugins", [])
+
+ kwargs = dict(kwargs)
+
+ loaded_plugins = [
+ plugins.load(plugin_name)(self, kwargs)
+ for plugin_name in plugin_names
+ ]
+
+ u = self.difference_update_query(["plugin", "plugins"])
+
+ for plugin in loaded_plugins:
+ new_u = plugin.update_url(u)
+ if new_u is not None:
+ u = new_u
+
+ kwargs.pop("plugins", None)
+
+ return u, loaded_plugins, kwargs
+
+ def _get_entrypoint(self):
+ """Return the "entry point" dialect class.
+
+ This is normally the dialect itself except in the case when the
+ returned class implements the get_dialect_cls() method.
+
+ """
+ if "+" not in self.drivername:
+ name = self.drivername
+ else:
+ name = self.drivername.replace("+", ".")
+ cls = registry.load(name)
+ # check for legacy dialects that
+ # would return a module with 'dialect' as the
+ # actual class
+ if (
+ hasattr(cls, "dialect")
+ and isinstance(cls.dialect, type)
+ and issubclass(cls.dialect, Dialect)
+ ):
+ return cls.dialect
+ else:
+ return cls
+
+ def get_dialect(self):
+ """Return the SQLAlchemy :class:`_engine.Dialect` class corresponding
+ to this URL's driver name.
+
+ """
+ entrypoint = self._get_entrypoint()
+ dialect_cls = entrypoint.get_dialect_cls(self)
+ return dialect_cls
+
+ def translate_connect_args(self, names=None, **kw):
+ r"""Translate url attributes into a dictionary of connection arguments.
+
+ Returns attributes of this url (`host`, `database`, `username`,
+ `password`, `port`) as a plain dictionary. The attribute names are
+ used as the keys by default. Unset or false attributes are omitted
+ from the final dictionary.
+
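+ E.g., a short sketch::
+
+     from sqlalchemy.engine import make_url
+
+     url = make_url("postgresql://user:pass@host:5432/dbname")
+
+     # rename the "username" key to "user" in the result
+     args = url.translate_connect_args(username="user")
+     # -> {'host': 'host', 'database': 'dbname', 'user': 'user',
+     #     'password': 'pass', 'port': 5432}
+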
+ :param \**kw: Optional, alternate key names for url attributes.
+
+ :param names: Deprecated. Same purpose as the keyword-based alternate
+ names, but correlates the name to the original positionally.
+ """
+
+ if names is not None:
+ util.warn_deprecated(
+ "The `URL.translate_connect_args.name`s parameter is "
+ "deprecated. Please pass the "
+ "alternate names as kw arguments.",
+ "1.4",
+ )
+
+ translated = {}
+ attribute_names = ["host", "database", "username", "password", "port"]
+ for sname in attribute_names:
+ if names:
+ name = names.pop(0)
+ elif sname in kw:
+ name = kw[sname]
+ else:
+ name = sname
+ if name is not None and getattr(self, sname, False):
+ if sname == "password":
+ translated[name] = str(getattr(self, sname))
+ else:
+ translated[name] = getattr(self, sname)
+
+ return translated
+
+
+def make_url(name_or_url):
+ """Given a string or unicode instance, produce a new URL instance.
+
+ The given string is parsed according to the RFC 1738 spec. If an
+ existing URL object is passed, just returns the object.
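+
+ E.g.::
+
+     >>> from sqlalchemy.engine import make_url
+     >>> url = make_url("postgresql+psycopg2://scott:tiger@localhost/test")
+     >>> url.drivername
+     'postgresql+psycopg2'
+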
+ """
+
+ if isinstance(name_or_url, util.string_types):
+ return _parse_rfc1738_args(name_or_url)
+ else:
+ return name_or_url
+
+
+def _parse_rfc1738_args(name):
+ pattern = re.compile(
+ r"""
+ (?P<name>[\w\+]+)://
+ (?:
+ (?P<username>[^:/]*)
+ (?::(?P<password>[^@]*))?
+ @)?
+ (?:
+ (?:
+ \[(?P<ipv6host>[^/\?]+)\] |
+ (?P<ipv4host>[^/:\?]+)
+ )?
+ (?::(?P<port>[^/\?]*))?
+ )?
+ (?:/(?P<database>[^\?]*))?
+ (?:\?(?P<query>.*))?
+ """,
+ re.X,
+ )
+
+ m = pattern.match(name)
+ if m is not None:
+ components = m.groupdict()
+ if components["query"] is not None:
+ query = {}
+
+ for key, value in util.parse_qsl(components["query"]):
+ if util.py2k:
+ key = key.encode("ascii")
+ if key in query:
+ query[key] = util.to_list(query[key])
+ query[key].append(value)
+ else:
+ query[key] = value
+ else:
+ query = None
+ components["query"] = query
+
+ if components["username"] is not None:
+ components["username"] = _rfc_1738_unquote(components["username"])
+
+ if components["password"] is not None:
+ components["password"] = _rfc_1738_unquote(components["password"])
+
+ ipv4host = components.pop("ipv4host")
+ ipv6host = components.pop("ipv6host")
+ components["host"] = ipv4host or ipv6host
+ name = components.pop("name")
+
+ if components["port"]:
+ components["port"] = int(components["port"])
+
+ return URL.create(name, **components)
+
+ else:
+ raise exc.ArgumentError(
+ "Could not parse rfc1738 URL from string '%s'" % name
+ )
+
+
+def _rfc_1738_quote(text):
+ return re.sub(r"[:@/]", lambda m: "%%%X" % ord(m.group(0)), text)
+
+
+def _rfc_1738_unquote(text):
+ return util.unquote(text)
+
+
+def _parse_keyvalue_args(name):
+ m = re.match(r"(\w+)://(.*)", name)
+ if m is not None:
+ (name, args) = m.group(1, 2)
+ opts = dict(util.parse_qsl(args))
+ return URL(name, **opts)
+ else:
+ return None
diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py
new file mode 100644
index 0000000..1b03ebb
--- /dev/null
+++ b/lib/sqlalchemy/engine/util.py
@@ -0,0 +1,253 @@
+# engine/util.py
+# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: https://www.opensource.org/licenses/mit-license.php
+
+from .. import exc
+from .. import util
+from ..util import collections_abc
+from ..util import immutabledict
+
+
+def connection_memoize(key):
+ """Decorator, memoize a function in a connection.info stash.
+
+ Only applicable to functions which take no arguments other than a
+ connection. The memo will be stored in ``connection.info[key]``.
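+
+ A minimal sketch of usage, assuming a hypothetical dialect-level
+ method (the ``exec_driver_sql`` call is illustrative)::
+
+     class MyDialect(object):
+         @connection_memoize("_server_version")
+         def server_version(self, connection):
+             return connection.exec_driver_sql(
+                 "SELECT version()"
+             ).scalar()
+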
+ """
+
+ @util.decorator
+ def decorated(fn, self, connection):
+ connection = connection.connect()
+ try:
+ return connection.info[key]
+ except KeyError:
+ connection.info[key] = val = fn(self, connection)
+ return val
+
+ return decorated
+
+
+_no_tuple = ()
+_no_kw = util.immutabledict()
+
+
+def _distill_params(connection, multiparams, params):
+ r"""Given arguments from the calling form \*multiparams, \**params,
+ return a list of bind parameter structures, usually a list of
+ dictionaries.
+
+ In the case of 'raw' execution which accepts positional parameters,
+ it may be a list of tuples or lists.
+
+ """
+
+ if not multiparams:
+ if params:
+ connection._warn_for_legacy_exec_format()
+ return [params]
+ else:
+ return []
+ elif len(multiparams) == 1:
+ zero = multiparams[0]
+ if isinstance(zero, (list, tuple)):
+ if (
+ not zero
+ or hasattr(zero[0], "__iter__")
+ and not hasattr(zero[0], "strip")
+ ):
+ # execute(stmt, [{}, {}, {}, ...])
+ # execute(stmt, [(), (), (), ...])
+ return zero
+ else:
+ # this is used by exec_driver_sql only, so a deprecation
+ # warning would already be coming from passing a plain
+ # textual statement with positional parameters to
+ # execute().
+ # execute(stmt, ("value", "value"))
+ return [zero]
+ elif hasattr(zero, "keys"):
+ # execute(stmt, {"key":"value"})
+ return [zero]
+ else:
+ connection._warn_for_legacy_exec_format()
+ # execute(stmt, "value")
+ return [[zero]]
+ else:
+ connection._warn_for_legacy_exec_format()
+ if hasattr(multiparams[0], "__iter__") and not hasattr(
+ multiparams[0], "strip"
+ ):
+ return multiparams
+ else:
+ return [multiparams]
+
+
+def _distill_cursor_params(connection, multiparams, params):
+ """_distill_params without any warnings. more appropriate for
+ "cursor" params that can include tuple arguments, lists of tuples,
+ etc.
+
+ """
+
+ if not multiparams:
+ if params:
+ return [params]
+ else:
+ return []
+ elif len(multiparams) == 1:
+ zero = multiparams[0]
+ if isinstance(zero, (list, tuple)):
+ if (
+ not zero
+ or hasattr(zero[0], "__iter__")
+ and not hasattr(zero[0], "strip")
+ ):
+ # execute(stmt, [{}, {}, {}, ...])
+ # execute(stmt, [(), (), (), ...])
+ return zero
+ else:
+ # this is used by exec_driver_sql only, so a deprecation
+ # warning would already be coming from passing a plain
+ # textual statement with positional parameters to
+ # execute().
+ # execute(stmt, ("value", "value"))
+
+ return [zero]
+ elif hasattr(zero, "keys"):
+ # execute(stmt, {"key":"value"})
+ return [zero]
+ else:
+ # execute(stmt, "value")
+ return [[zero]]
+ else:
+ if hasattr(multiparams[0], "__iter__") and not hasattr(
+ multiparams[0], "strip"
+ ):
+ return multiparams
+ else:
+ return [multiparams]
+
+
+def _distill_params_20(params):
+ if params is None:
+ return _no_tuple, _no_kw
+ elif isinstance(params, list):
+ # collections_abc.MutableSequence): # avoid abc.__instancecheck__
+ if params and not isinstance(
+ params[0], (collections_abc.Mapping, tuple)
+ ):
+ raise exc.ArgumentError(
+ "List argument must consist only of tuples or dictionaries"
+ )
+
+ return (params,), _no_kw
+ elif isinstance(
+ params,
+ (tuple, dict, immutabledict),
+ # only do abc.__instancecheck__ for Mapping after we've checked
+ # for plain dictionaries and would otherwise raise
+ ) or isinstance(params, collections_abc.Mapping):
+ return (params,), _no_kw
+ else:
+ raise exc.ArgumentError("mapping or sequence expected for parameters")
+
+
+class TransactionalContext(object):
+ """Apply Python context manager behavior to transaction objects.
+
+ Performs validation to ensure the subject of the transaction is not
+ used after the transaction has been ended prematurely.
+
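+ A short sketch of the intended pattern, assuming a hypothetical
+ ``conn.begin()`` call that returns such an object::
+
+     with conn.begin():
+         conn.execute(stmt)
+     # commits on success; rolls back if an exception was raised
+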
+ """
+
+ _trans_subject = None
+
+ def _transaction_is_active(self):
+ raise NotImplementedError()
+
+ def _transaction_is_closed(self):
+ raise NotImplementedError()
+
+ def _rollback_can_be_called(self):
+ """indicates the object is in a state that is known to be acceptable
+ for rollback() to be called.
+
+ This does not necessarily mean rollback() will succeed or not raise
+ an error, just that there is currently no state detected that indicates
+ rollback() would fail or emit warnings.
+
+ It also does not mean that there's a transaction in progress, as
+ it is usually safe to call rollback() even if no transaction is
+ present.
+
+ .. versionadded:: 1.4.28
+
+ """
+ raise NotImplementedError()
+
+ def _get_subject(self):
+ raise NotImplementedError()
+
+ @classmethod
+ def _trans_ctx_check(cls, subject):
+ trans_context = subject._trans_context_manager
+ if trans_context:
+ if not trans_context._transaction_is_active():
+ raise exc.InvalidRequestError(
+ "Can't operate on closed transaction inside context "
+ "manager. Please complete the context manager "
+ "before emitting further commands."
+ )
+
+ def __enter__(self):
+ subject = self._get_subject()
+
+ # none for outer transaction, may be non-None for nested
+ # savepoint, legacy nesting cases
+ trans_context = subject._trans_context_manager
+ self._outer_trans_ctx = trans_context
+
+ self._trans_subject = subject
+ subject._trans_context_manager = self
+ return self
+
+ def __exit__(self, type_, value, traceback):
+ subject = self._trans_subject
+
+ # Simplistically we could assume that
+ # "subject._trans_context_manager is self". However, any calling
+ # code that is manipulating __exit__ directly would break this
+ # assumption. The Alembic context manager is an example of partial
+ # use that just calls __exit__ and not __enter__ at the moment;
+ # it's safe to assume this is being done in the wild also.
+ out_of_band_exit = (
+ subject is None or subject._trans_context_manager is not self
+ )
+
+ if type_ is None and self._transaction_is_active():
+ try:
+ self.commit()
+ except:
+ with util.safe_reraise():
+ if self._rollback_can_be_called():
+ self.rollback()
+ finally:
+ if not out_of_band_exit:
+ subject._trans_context_manager = self._outer_trans_ctx
+ self._trans_subject = self._outer_trans_ctx = None
+ else:
+ try:
+ if not self._transaction_is_active():
+ if not self._transaction_is_closed():
+ self.close()
+ else:
+ if self._rollback_can_be_called():
+ self.rollback()
+ finally:
+ if not out_of_band_exit:
+ subject._trans_context_manager = self._outer_trans_ctx
+ self._trans_subject = self._outer_trans_ctx = None