first add files
This commit is contained in:
62
lib/sqlalchemy/engine/__init__.py
Normal file
62
lib/sqlalchemy/engine/__init__.py
Normal file
@@ -0,0 +1,62 @@
|
||||
# engine/__init__.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""SQL connections, SQL execution and high-level DB-API interface.
|
||||
|
||||
The engine package defines the basic components used to interface
|
||||
DB-API modules with higher-level statement construction,
|
||||
connection-management, execution and result contexts. The primary
|
||||
"entry point" class into this package is the Engine and its public
|
||||
constructor ``create_engine()``.
|
||||
|
||||
"""
|
||||
|
||||
from . import events
|
||||
from . import util
|
||||
from .base import Connection
|
||||
from .base import Engine
|
||||
from .base import NestedTransaction
|
||||
from .base import RootTransaction
|
||||
from .base import Transaction
|
||||
from .base import TwoPhaseTransaction
|
||||
from .create import create_engine
|
||||
from .create import engine_from_config
|
||||
from .cursor import BaseCursorResult
|
||||
from .cursor import BufferedColumnResultProxy
|
||||
from .cursor import BufferedColumnRow
|
||||
from .cursor import BufferedRowResultProxy
|
||||
from .cursor import CursorResult
|
||||
from .cursor import FullyBufferedResultProxy
|
||||
from .cursor import LegacyCursorResult
|
||||
from .cursor import ResultProxy
|
||||
from .interfaces import AdaptedConnection
|
||||
from .interfaces import Compiled
|
||||
from .interfaces import Connectable
|
||||
from .interfaces import CreateEnginePlugin
|
||||
from .interfaces import Dialect
|
||||
from .interfaces import ExceptionContext
|
||||
from .interfaces import ExecutionContext
|
||||
from .interfaces import TypeCompiler
|
||||
from .mock import create_mock_engine
|
||||
from .reflection import Inspector
|
||||
from .result import ChunkedIteratorResult
|
||||
from .result import FilterResult
|
||||
from .result import FrozenResult
|
||||
from .result import IteratorResult
|
||||
from .result import MappingResult
|
||||
from .result import MergedResult
|
||||
from .result import Result
|
||||
from .result import result_tuple
|
||||
from .result import ScalarResult
|
||||
from .row import BaseRow
|
||||
from .row import LegacyRow
|
||||
from .row import Row
|
||||
from .row import RowMapping
|
||||
from .url import make_url
|
||||
from .url import URL
|
||||
from .util import connection_memoize
|
||||
from ..sql import ddl
|
||||
3450
lib/sqlalchemy/engine/base.py
Normal file
3450
lib/sqlalchemy/engine/base.py
Normal file
File diff suppressed because it is too large
Load Diff
56
lib/sqlalchemy/engine/characteristics.py
Normal file
56
lib/sqlalchemy/engine/characteristics.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import abc
|
||||
|
||||
from ..util import ABC
|
||||
|
||||
|
||||
class ConnectionCharacteristic(ABC):
    """Abstract base for an object that can set, get and reset a
    per-connection characteristic, typically one that is reset when the
    connection is returned to the connection pool.

    Transaction isolation is the canonical example, provided for the
    ``DefaultDialect`` by the ``IsolationLevelCharacteristic``
    implementation.

    Subclasses should call upon the ``Dialect`` for the implementation of
    each method; this object exists strictly as a dialect visitor that can
    be placed into the ``DefaultDialect.connection_characteristics``
    dictionary, where it takes effect for calls to
    :meth:`_engine.Connection.execution_options` and related APIs.

    .. versionadded:: 1.4

    """

    __slots__ = ()

    # flag indicating the characteristic is tied to transactional state
    # (e.g. isolation level); False by default
    transactional = False

    @abc.abstractmethod
    def set_characteristic(self, dialect, dbapi_conn, value):
        """set characteristic on the connection to a given value."""

    @abc.abstractmethod
    def get_characteristic(self, dialect, dbapi_conn):
        """Given a DBAPI connection, get the current value of the
        characteristic.

        """

    @abc.abstractmethod
    def reset_characteristic(self, dialect, dbapi_conn):
        """Reset the characteristic on the connection to its default value."""
class IsolationLevelCharacteristic(ConnectionCharacteristic):
    """Manage the transaction isolation level of a DBAPI connection by
    delegating to the dialect's isolation-level methods."""

    # isolation level is transaction-related, per the base class contract
    transactional = True

    def set_characteristic(self, dialect, dbapi_conn, value):
        """Apply the given isolation level to the connection via the
        dialect."""
        dialect.set_isolation_level(dbapi_conn, value)

    def get_characteristic(self, dialect, dbapi_conn):
        """Return the connection's current isolation level via the
        dialect."""
        return dialect.get_isolation_level(dbapi_conn)

    def reset_characteristic(self, dialect, dbapi_conn):
        """Restore the default isolation level on the connection via the
        dialect."""
        dialect.reset_isolation_level(dbapi_conn)
743
lib/sqlalchemy/engine/create.py
Normal file
743
lib/sqlalchemy/engine/create.py
Normal file
@@ -0,0 +1,743 @@
|
||||
# engine/create.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
|
||||
from . import base
|
||||
from . import url as _url
|
||||
from .mock import create_mock_engine
|
||||
from .. import event
|
||||
from .. import exc
|
||||
from .. import pool as poollib
|
||||
from .. import util
|
||||
from ..sql import compiler
|
||||
|
||||
|
||||
@util.deprecated_params(
|
||||
strategy=(
|
||||
"1.4",
|
||||
"The :paramref:`_sa.create_engine.strategy` keyword is deprecated, "
|
||||
"and the only argument accepted is 'mock'; please use "
|
||||
":func:`.create_mock_engine` going forward. For general "
|
||||
"customization of create_engine which may have been accomplished "
|
||||
"using strategies, see :class:`.CreateEnginePlugin`.",
|
||||
),
|
||||
empty_in_strategy=(
|
||||
"1.4",
|
||||
"The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
|
||||
"deprecated, and no longer has any effect. All IN expressions "
|
||||
"are now rendered using "
|
||||
'the "expanding parameter" strategy which renders a set of bound'
|
||||
'expressions, or an "empty set" SELECT, at statement execution'
|
||||
"time.",
|
||||
),
|
||||
case_sensitive=(
|
||||
"1.4",
|
||||
"The :paramref:`_sa.create_engine.case_sensitive` parameter "
|
||||
"is deprecated and will be removed in a future release. "
|
||||
"Applications should work with result column names in a case "
|
||||
"sensitive fashion.",
|
||||
),
|
||||
)
|
||||
def create_engine(url, **kwargs):
|
||||
"""Create a new :class:`_engine.Engine` instance.
|
||||
|
||||
The standard calling form is to send the :ref:`URL <database_urls>` as the
|
||||
first positional argument, usually a string
|
||||
that indicates database dialect and connection arguments::
|
||||
|
||||
engine = create_engine("postgresql://scott:tiger@localhost/test")
|
||||
|
||||
.. note::
|
||||
|
||||
Please review :ref:`database_urls` for general guidelines in composing
|
||||
URL strings. In particular, special characters, such as those often
|
||||
part of passwords, must be URL encoded to be properly parsed.
|
||||
|
||||
Additional keyword arguments may then follow it which
|
||||
establish various options on the resulting :class:`_engine.Engine`
|
||||
and its underlying :class:`.Dialect` and :class:`_pool.Pool`
|
||||
constructs::
|
||||
|
||||
engine = create_engine("mysql://scott:tiger@hostname/dbname",
|
||||
encoding='latin1', echo=True)
|
||||
|
||||
The string form of the URL is
|
||||
``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
|
||||
``dialect`` is a database name such as ``mysql``, ``oracle``,
|
||||
``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
|
||||
``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
|
||||
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
|
||||
|
||||
``**kwargs`` takes a wide variety of options which are routed
|
||||
towards their appropriate components. Arguments may be specific to
|
||||
the :class:`_engine.Engine`, the underlying :class:`.Dialect`,
|
||||
as well as the
|
||||
:class:`_pool.Pool`. Specific dialects also accept keyword arguments that
|
||||
are unique to that dialect. Here, we describe the parameters
|
||||
that are common to most :func:`_sa.create_engine()` usage.
|
||||
|
||||
Once established, the newly resulting :class:`_engine.Engine` will
|
||||
request a connection from the underlying :class:`_pool.Pool` once
|
||||
:meth:`_engine.Engine.connect` is called, or a method which depends on it
|
||||
such as :meth:`_engine.Engine.execute` is invoked. The
|
||||
:class:`_pool.Pool` in turn
|
||||
will establish the first actual DBAPI connection when this request
|
||||
is received. The :func:`_sa.create_engine` call itself does **not**
|
||||
establish any actual DBAPI connections directly.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:doc:`/core/engines`
|
||||
|
||||
:doc:`/dialects/index`
|
||||
|
||||
:ref:`connections_toplevel`
|
||||
|
||||
:param case_sensitive: if False, result column names
|
||||
will match in a case-insensitive fashion, that is,
|
||||
``row['SomeColumn']``.
|
||||
|
||||
:param connect_args: a dictionary of options which will be
|
||||
passed directly to the DBAPI's ``connect()`` method as
|
||||
additional keyword arguments. See the example
|
||||
at :ref:`custom_dbapi_args`.
|
||||
|
||||
:param convert_unicode=False: if set to True, causes
|
||||
all :class:`.String` datatypes to act as though the
|
||||
:paramref:`.String.convert_unicode` flag has been set to ``True``,
|
||||
regardless of a setting of ``False`` on an individual :class:`.String`
|
||||
type. This has the effect of causing all :class:`.String` -based
|
||||
columns to accommodate Python Unicode objects directly as though the
|
||||
datatype were the :class:`.Unicode` type.
|
||||
|
||||
.. deprecated:: 1.3
|
||||
|
||||
The :paramref:`_sa.create_engine.convert_unicode` parameter
|
||||
is deprecated and will be removed in a future release.
|
||||
All modern DBAPIs now support Python Unicode directly and this
|
||||
parameter is unnecessary.
|
||||
|
||||
:param creator: a callable which returns a DBAPI connection.
|
||||
This creation function will be passed to the underlying
|
||||
connection pool and will be used to create all new database
|
||||
connections. Usage of this function causes connection
|
||||
parameters specified in the URL argument to be bypassed.
|
||||
|
||||
This hook is not as flexible as the newer
|
||||
:meth:`_events.DialectEvents.do_connect` hook which allows complete
|
||||
control over how a connection is made to the database, given the full
|
||||
set of URL arguments and state beforehand.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_events.DialectEvents.do_connect` - event hook that allows
|
||||
full control over DBAPI connection mechanics.
|
||||
|
||||
:ref:`custom_dbapi_args`
|
||||
|
||||
:param echo=False: if True, the Engine will log all statements
|
||||
as well as a ``repr()`` of their parameter lists to the default log
|
||||
handler, which defaults to ``sys.stdout`` for output. If set to the
|
||||
string ``"debug"``, result rows will be printed to the standard output
|
||||
as well. The ``echo`` attribute of ``Engine`` can be modified at any
|
||||
time to turn logging on and off; direct control of logging is also
|
||||
available using the standard Python ``logging`` module.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`dbengine_logging` - further detail on how to configure
|
||||
logging.
|
||||
|
||||
|
||||
:param echo_pool=False: if True, the connection pool will log
|
||||
informational output such as when connections are invalidated
|
||||
as well as when connections are recycled to the default log handler,
|
||||
which defaults to ``sys.stdout`` for output. If set to the string
|
||||
``"debug"``, the logging will include pool checkouts and checkins.
|
||||
Direct control of logging is also available using the standard Python
|
||||
``logging`` module.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`dbengine_logging` - further detail on how to configure
|
||||
logging.
|
||||
|
||||
|
||||
:param empty_in_strategy: No longer used; SQLAlchemy now uses
|
||||
"empty set" behavior for IN in all cases.
|
||||
|
||||
:param enable_from_linting: defaults to True. Will emit a warning
|
||||
if a given SELECT statement is found to have un-linked FROM elements
|
||||
which would cause a cartesian product.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`change_4737`
|
||||
|
||||
:param encoding: **legacy Python 2 value only, where it only applies to
|
||||
specific DBAPIs, not used in Python 3 for any modern DBAPI driver.
|
||||
Please refer to individual dialect documentation for client encoding
|
||||
behaviors.** Defaults to the string value ``utf-8``. This value
|
||||
refers **only** to the character encoding that is used when SQLAlchemy
|
||||
sends or receives data from a :term:`DBAPI` that does not support
|
||||
Python Unicode and **is only used under Python 2**, only for certain
|
||||
DBAPI drivers, and only in certain circumstances. **Python 3 users
|
||||
please DISREGARD this parameter and refer to the documentation for the
|
||||
specific dialect in use in order to configure character encoding
|
||||
behavior.**
|
||||
|
||||
.. note:: The ``encoding`` parameter deals only with in-Python
|
||||
encoding issues that were prevalent with **some DBAPIS only**
|
||||
under **Python 2 only**. Under Python 3 it is not used by
|
||||
any modern dialect. For DBAPIs that require
|
||||
client encoding configurations, which are most of those outside
|
||||
of SQLite, please consult specific :ref:`dialect documentation
|
||||
<dialect_toplevel>` for details.
|
||||
|
||||
All modern DBAPIs that work in Python 3 necessarily feature direct
|
||||
support for Python unicode strings. Under Python 2, this was not
|
||||
always the case. For those scenarios where the DBAPI is detected as
|
||||
not supporting a Python ``unicode`` object under Python 2, this
|
||||
encoding is used to determine the source/destination encoding. It is
|
||||
**not used** for those cases where the DBAPI handles unicode directly.
|
||||
|
||||
To properly configure a system to accommodate Python ``unicode``
|
||||
objects, the DBAPI should be configured to handle unicode to the
|
||||
greatest degree as is appropriate - see the notes on unicode pertaining
|
||||
to the specific target database in use at :ref:`dialect_toplevel`.
|
||||
|
||||
Areas where string encoding may need to be accommodated
|
||||
outside of the DBAPI, nearly always under **Python 2 only**,
|
||||
include zero or more of:
|
||||
|
||||
* the values passed to bound parameters, corresponding to
|
||||
the :class:`.Unicode` type or the :class:`.String` type
|
||||
when ``convert_unicode`` is ``True``;
|
||||
* the values returned in result set columns corresponding
|
||||
to the :class:`.Unicode` type or the :class:`.String`
|
||||
type when ``convert_unicode`` is ``True``;
|
||||
* the string SQL statement passed to the DBAPI's
|
||||
``cursor.execute()`` method;
|
||||
* the string names of the keys in the bound parameter
|
||||
dictionary passed to the DBAPI's ``cursor.execute()``
|
||||
as well as ``cursor.setinputsizes()`` methods;
|
||||
* the string column names retrieved from the DBAPI's
|
||||
``cursor.description`` attribute.
|
||||
|
||||
When using Python 3, the DBAPI is required to support all of the above
|
||||
values as Python ``unicode`` objects, which in Python 3 are just known
|
||||
as ``str``. In Python 2, the DBAPI does not specify unicode behavior
|
||||
at all, so SQLAlchemy must make decisions for each of the above values
|
||||
on a per-DBAPI basis - implementations are completely inconsistent in
|
||||
their behavior.
|
||||
|
||||
:param execution_options: Dictionary execution options which will
|
||||
be applied to all connections. See
|
||||
:meth:`~sqlalchemy.engine.Connection.execution_options`
|
||||
|
||||
:param future: Use the 2.0 style :class:`_future.Engine` and
|
||||
:class:`_future.Connection` API.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`migration_20_toplevel`
|
||||
|
||||
:param hide_parameters: Boolean, when set to True, SQL statement parameters
|
||||
will not be displayed in INFO logging nor will they be formatted into
|
||||
the string representation of :class:`.StatementError` objects.
|
||||
|
||||
.. versionadded:: 1.3.8
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`dbengine_logging` - further detail on how to configure
|
||||
logging.
|
||||
|
||||
:param implicit_returning=True: Legacy flag that when set to ``False``
|
||||
will disable the use of ``RETURNING`` on supporting backends where it
|
||||
would normally be used to fetch newly generated primary key values for
|
||||
single-row INSERT statements that do not otherwise specify a RETURNING
|
||||
clause. This behavior applies primarily to the PostgreSQL, Oracle,
|
||||
SQL Server backends.
|
||||
|
||||
.. warning:: this flag originally allowed the "implicit returning"
|
||||
feature to be *enabled* back when it was very new and there was not
|
||||
well-established database support. In modern SQLAlchemy, this flag
|
||||
should **always be set to True**. Some SQLAlchemy features will
|
||||
fail to function properly if this flag is set to ``False``.
|
||||
|
||||
:param isolation_level: this string parameter is interpreted by various
|
||||
dialects in order to affect the transaction isolation level of the
|
||||
database connection. The parameter essentially accepts some subset of
|
||||
these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE READ"``,
|
||||
``"READ COMMITTED"``, ``"READ UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
|
||||
Behavior here varies per backend, and
|
||||
individual dialects should be consulted directly.
|
||||
|
||||
Note that the isolation level can also be set on a
|
||||
per-:class:`_engine.Connection` basis as well, using the
|
||||
:paramref:`.Connection.execution_options.isolation_level`
|
||||
feature.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`dbapi_autocommit`
|
||||
|
||||
:param json_deserializer: for dialects that support the
|
||||
:class:`_types.JSON`
|
||||
datatype, this is a Python callable that will convert a JSON string
|
||||
to a Python object. By default, the Python ``json.loads`` function is
|
||||
used.
|
||||
|
||||
.. versionchanged:: 1.3.7 The SQLite dialect renamed this from
|
||||
``_json_deserializer``.
|
||||
|
||||
:param json_serializer: for dialects that support the :class:`_types.JSON`
|
||||
datatype, this is a Python callable that will render a given object
|
||||
as JSON. By default, the Python ``json.dumps`` function is used.
|
||||
|
||||
.. versionchanged:: 1.3.7 The SQLite dialect renamed this from
|
||||
``_json_serializer``.
|
||||
|
||||
|
||||
:param label_length=None: optional integer value which limits
|
||||
the size of dynamically generated column labels to that many
|
||||
characters. If less than 6, labels are generated as
|
||||
"_(counter)". If ``None``, the value of
|
||||
``dialect.max_identifier_length``, which may be affected via the
|
||||
:paramref:`_sa.create_engine.max_identifier_length` parameter,
|
||||
is used instead. The value of
|
||||
:paramref:`_sa.create_engine.label_length`
|
||||
may not be larger than that of
|
||||
:paramref:`_sa.create_engine.max_identfier_length`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:paramref:`_sa.create_engine.max_identifier_length`
|
||||
|
||||
:param listeners: A list of one or more
|
||||
:class:`~sqlalchemy.interfaces.PoolListener` objects which will
|
||||
receive connection pool events.
|
||||
|
||||
:param logging_name: String identifier which will be used within
|
||||
the "name" field of logging records generated within the
|
||||
"sqlalchemy.engine" logger. Defaults to a hexstring of the
|
||||
object's id.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`dbengine_logging` - further detail on how to configure
|
||||
logging.
|
||||
|
||||
:paramref:`_engine.Connection.execution_options.logging_token`
|
||||
|
||||
|
||||
|
||||
:param max_identifier_length: integer; override the max_identifier_length
|
||||
determined by the dialect. if ``None`` or zero, has no effect. This
|
||||
is the database's configured maximum number of characters that may be
|
||||
used in a SQL identifier such as a table name, column name, or label
|
||||
name. All dialects determine this value automatically, however in the
|
||||
case of a new database version for which this value has changed but
|
||||
SQLAlchemy's dialect has not been adjusted, the value may be passed
|
||||
here.
|
||||
|
||||
.. versionadded:: 1.3.9
|
||||
|
||||
.. seealso::
|
||||
|
||||
:paramref:`_sa.create_engine.label_length`
|
||||
|
||||
:param max_overflow=10: the number of connections to allow in
|
||||
connection pool "overflow", that is connections that can be
|
||||
opened above and beyond the pool_size setting, which defaults
|
||||
to five. this is only used with :class:`~sqlalchemy.pool.QueuePool`.
|
||||
|
||||
:param module=None: reference to a Python module object (the module
|
||||
itself, not its string name). Specifies an alternate DBAPI module to
|
||||
be used by the engine's dialect. Each sub-dialect references a
|
||||
specific DBAPI which will be imported before first connect. This
|
||||
parameter causes the import to be bypassed, and the given module to
|
||||
be used instead. Can be used for testing of DBAPIs as well as to
|
||||
inject "mock" DBAPI implementations into the :class:`_engine.Engine`.
|
||||
|
||||
:param paramstyle=None: The `paramstyle <https://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
|
||||
to use when rendering bound parameters. This style defaults to the
|
||||
one recommended by the DBAPI itself, which is retrieved from the
|
||||
``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
|
||||
more than one paramstyle, and in particular it may be desirable
|
||||
to change a "named" paramstyle into a "positional" one, or vice versa.
|
||||
When this attribute is passed, it should be one of the values
|
||||
``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
|
||||
``"pyformat"``, and should correspond to a parameter style known
|
||||
to be supported by the DBAPI in use.
|
||||
|
||||
:param pool=None: an already-constructed instance of
|
||||
:class:`~sqlalchemy.pool.Pool`, such as a
|
||||
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
|
||||
pool will be used directly as the underlying connection pool
|
||||
for the engine, bypassing whatever connection parameters are
|
||||
present in the URL argument. For information on constructing
|
||||
connection pools manually, see :ref:`pooling_toplevel`.
|
||||
|
||||
:param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
|
||||
subclass, which will be used to create a connection pool
|
||||
instance using the connection parameters given in the URL. Note
|
||||
this differs from ``pool`` in that you don't actually
|
||||
instantiate the pool in this case, you just indicate what type
|
||||
of pool to be used.
|
||||
|
||||
:param pool_logging_name: String identifier which will be used within
|
||||
the "name" field of logging records generated within the
|
||||
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
|
||||
id.
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`dbengine_logging` - further detail on how to configure
|
||||
logging.
|
||||
|
||||
|
||||
:param pool_pre_ping: boolean, if True will enable the connection pool
|
||||
"pre-ping" feature that tests connections for liveness upon
|
||||
each checkout.
|
||||
|
||||
.. versionadded:: 1.2
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`pool_disconnects_pessimistic`
|
||||
|
||||
:param pool_size=5: the number of connections to keep open
|
||||
inside the connection pool. This used with
|
||||
:class:`~sqlalchemy.pool.QueuePool` as
|
||||
well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
|
||||
:class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
|
||||
of 0 indicates no limit; to disable pooling, set ``poolclass`` to
|
||||
:class:`~sqlalchemy.pool.NullPool` instead.
|
||||
|
||||
:param pool_recycle=-1: this setting causes the pool to recycle
|
||||
connections after the given number of seconds has passed. It
|
||||
defaults to -1, or no timeout. For example, setting to 3600
|
||||
means connections will be recycled after one hour. Note that
|
||||
MySQL in particular will disconnect automatically if no
|
||||
activity is detected on a connection for eight hours (although
|
||||
this is configurable with the MySQLDB connection itself and the
|
||||
server configuration as well).
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`pool_setting_recycle`
|
||||
|
||||
:param pool_reset_on_return='rollback': set the
|
||||
:paramref:`_pool.Pool.reset_on_return` parameter of the underlying
|
||||
:class:`_pool.Pool` object, which can be set to the values
|
||||
``"rollback"``, ``"commit"``, or ``None``.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:paramref:`_pool.Pool.reset_on_return`
|
||||
|
||||
:param pool_timeout=30: number of seconds to wait before giving
|
||||
up on getting a connection from the pool. This is only used
|
||||
with :class:`~sqlalchemy.pool.QueuePool`. This can be a float but is
|
||||
subject to the limitations of Python time functions which may not be
|
||||
reliable in the tens of milliseconds.
|
||||
|
||||
.. note: don't use 30.0 above, it seems to break with the :param tag
|
||||
|
||||
:param pool_use_lifo=False: use LIFO (last-in-first-out) when retrieving
|
||||
connections from :class:`.QueuePool` instead of FIFO
|
||||
(first-in-first-out). Using LIFO, a server-side timeout scheme can
|
||||
reduce the number of connections used during non- peak periods of
|
||||
use. When planning for server-side timeouts, ensure that a recycle or
|
||||
pre-ping strategy is in use to gracefully handle stale connections.
|
||||
|
||||
.. versionadded:: 1.3
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`pool_use_lifo`
|
||||
|
||||
:ref:`pool_disconnects`
|
||||
|
||||
:param plugins: string list of plugin names to load. See
|
||||
:class:`.CreateEnginePlugin` for background.
|
||||
|
||||
.. versionadded:: 1.2.3
|
||||
|
||||
:param query_cache_size: size of the cache used to cache the SQL string
|
||||
form of queries. Set to zero to disable caching.
|
||||
|
||||
The cache is pruned of its least recently used items when its size reaches
|
||||
N * 1.5. Defaults to 500, meaning the cache will always store at least
|
||||
500 SQL statements when filled, and will grow up to 750 items at which
|
||||
point it is pruned back down to 500 by removing the 250 least recently
|
||||
used items.
|
||||
|
||||
Caching is accomplished on a per-statement basis by generating a
|
||||
cache key that represents the statement's structure, then generating
|
||||
string SQL for the current dialect only if that key is not present
|
||||
in the cache. All statements support caching, however some features
|
||||
such as an INSERT with a large set of parameters will intentionally
|
||||
bypass the cache. SQL logging will indicate statistics for each
|
||||
statement whether or not it were pull from the cache.
|
||||
|
||||
.. note:: some ORM functions related to unit-of-work persistence as well
|
||||
as some attribute loading strategies will make use of individual
|
||||
per-mapper caches outside of the main cache.
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`sql_caching`
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
""" # noqa
|
||||
|
||||
if "strategy" in kwargs:
|
||||
strat = kwargs.pop("strategy")
|
||||
if strat == "mock":
|
||||
return create_mock_engine(url, **kwargs)
|
||||
else:
|
||||
raise exc.ArgumentError("unknown strategy: %r" % strat)
|
||||
|
||||
kwargs.pop("empty_in_strategy", None)
|
||||
|
||||
# create url.URL object
|
||||
u = _url.make_url(url)
|
||||
|
||||
u, plugins, kwargs = u._instantiate_plugins(kwargs)
|
||||
|
||||
entrypoint = u._get_entrypoint()
|
||||
dialect_cls = entrypoint.get_dialect_cls(u)
|
||||
|
||||
if kwargs.pop("_coerce_config", False):
|
||||
|
||||
def pop_kwarg(key, default=None):
|
||||
value = kwargs.pop(key, default)
|
||||
if key in dialect_cls.engine_config_types:
|
||||
value = dialect_cls.engine_config_types[key](value)
|
||||
return value
|
||||
|
||||
else:
|
||||
pop_kwarg = kwargs.pop
|
||||
|
||||
dialect_args = {}
|
||||
# consume dialect arguments from kwargs
|
||||
for k in util.get_cls_kwargs(dialect_cls):
|
||||
if k in kwargs:
|
||||
dialect_args[k] = pop_kwarg(k)
|
||||
|
||||
dbapi = kwargs.pop("module", None)
|
||||
if dbapi is None:
|
||||
dbapi_args = {}
|
||||
for k in util.get_func_kwargs(dialect_cls.dbapi):
|
||||
if k in kwargs:
|
||||
dbapi_args[k] = pop_kwarg(k)
|
||||
dbapi = dialect_cls.dbapi(**dbapi_args)
|
||||
|
||||
dialect_args["dbapi"] = dbapi
|
||||
|
||||
dialect_args.setdefault("compiler_linting", compiler.NO_LINTING)
|
||||
enable_from_linting = kwargs.pop("enable_from_linting", True)
|
||||
if enable_from_linting:
|
||||
dialect_args["compiler_linting"] ^= compiler.COLLECT_CARTESIAN_PRODUCTS
|
||||
|
||||
for plugin in plugins:
|
||||
plugin.handle_dialect_kwargs(dialect_cls, dialect_args)
|
||||
|
||||
# create dialect
|
||||
dialect = dialect_cls(**dialect_args)
|
||||
|
||||
# assemble connection arguments
|
||||
(cargs, cparams) = dialect.create_connect_args(u)
|
||||
cparams.update(pop_kwarg("connect_args", {}))
|
||||
cargs = list(cargs) # allow mutability
|
||||
|
||||
# look for existing pool or create
|
||||
pool = pop_kwarg("pool", None)
|
||||
if pool is None:
|
||||
|
||||
def connect(connection_record=None):
|
||||
if dialect._has_events:
|
||||
for fn in dialect.dispatch.do_connect:
|
||||
connection = fn(dialect, connection_record, cargs, cparams)
|
||||
if connection is not None:
|
||||
return connection
|
||||
return dialect.connect(*cargs, **cparams)
|
||||
|
||||
creator = pop_kwarg("creator", connect)
|
||||
|
||||
poolclass = pop_kwarg("poolclass", None)
|
||||
if poolclass is None:
|
||||
poolclass = dialect.get_dialect_pool_class(u)
|
||||
pool_args = {"dialect": dialect}
|
||||
|
||||
# consume pool arguments from kwargs, translating a few of
|
||||
# the arguments
|
||||
translate = {
|
||||
"logging_name": "pool_logging_name",
|
||||
"echo": "echo_pool",
|
||||
"timeout": "pool_timeout",
|
||||
"recycle": "pool_recycle",
|
||||
"events": "pool_events",
|
||||
"reset_on_return": "pool_reset_on_return",
|
||||
"pre_ping": "pool_pre_ping",
|
||||
"use_lifo": "pool_use_lifo",
|
||||
}
|
||||
for k in util.get_cls_kwargs(poolclass):
|
||||
tk = translate.get(k, k)
|
||||
if tk in kwargs:
|
||||
pool_args[k] = pop_kwarg(tk)
|
||||
|
||||
for plugin in plugins:
|
||||
plugin.handle_pool_kwargs(poolclass, pool_args)
|
||||
|
||||
pool = poolclass(creator, **pool_args)
|
||||
else:
|
||||
if isinstance(pool, poollib.dbapi_proxy._DBProxy):
|
||||
pool = pool.get_pool(*cargs, **cparams)
|
||||
|
||||
pool._dialect = dialect
|
||||
|
||||
# create engine.
|
||||
if pop_kwarg("future", False):
|
||||
from sqlalchemy import future
|
||||
|
||||
default_engine_class = future.Engine
|
||||
else:
|
||||
default_engine_class = base.Engine
|
||||
|
||||
engineclass = kwargs.pop("_future_engine_class", default_engine_class)
|
||||
|
||||
engine_args = {}
|
||||
for k in util.get_cls_kwargs(engineclass):
|
||||
if k in kwargs:
|
||||
engine_args[k] = pop_kwarg(k)
|
||||
|
||||
# internal flags used by the test suite for instrumenting / proxying
|
||||
# engines with mocks etc.
|
||||
_initialize = kwargs.pop("_initialize", True)
|
||||
_wrap_do_on_connect = kwargs.pop("_wrap_do_on_connect", None)
|
||||
|
||||
# all kwargs should be consumed
|
||||
if kwargs:
|
||||
raise TypeError(
|
||||
"Invalid argument(s) %s sent to create_engine(), "
|
||||
"using configuration %s/%s/%s. Please check that the "
|
||||
"keyword arguments are appropriate for this combination "
|
||||
"of components."
|
||||
% (
|
||||
",".join("'%s'" % k for k in kwargs),
|
||||
dialect.__class__.__name__,
|
||||
pool.__class__.__name__,
|
||||
engineclass.__name__,
|
||||
)
|
||||
)
|
||||
|
||||
engine = engineclass(pool, dialect, u, **engine_args)
|
||||
|
||||
if _initialize:
|
||||
do_on_connect = dialect.on_connect_url(u)
|
||||
if do_on_connect:
|
||||
if _wrap_do_on_connect:
|
||||
do_on_connect = _wrap_do_on_connect(do_on_connect)
|
||||
|
||||
def on_connect(dbapi_connection, connection_record):
|
||||
do_on_connect(dbapi_connection)
|
||||
|
||||
event.listen(pool, "connect", on_connect)
|
||||
|
||||
def first_connect(dbapi_connection, connection_record):
|
||||
c = base.Connection(
|
||||
engine,
|
||||
connection=dbapi_connection,
|
||||
_has_events=False,
|
||||
# reconnecting will be a reentrant condition, so if the
|
||||
# connection goes away, Connection is then closed
|
||||
_allow_revalidate=False,
|
||||
)
|
||||
c._execution_options = util.EMPTY_DICT
|
||||
|
||||
try:
|
||||
dialect.initialize(c)
|
||||
finally:
|
||||
# note that "invalidated" and "closed" are mutually
|
||||
# exclusive in 1.4 Connection.
|
||||
if not c.invalidated and not c.closed:
|
||||
# transaction is rolled back otherwise, tested by
|
||||
# test/dialect/postgresql/test_dialect.py
|
||||
# ::MiscBackendTest::test_initial_transaction_state
|
||||
dialect.do_rollback(c.connection)
|
||||
|
||||
# previously, the "first_connect" event was used here, which was then
|
||||
# scaled back if the "on_connect" handler were present. now,
|
||||
# since "on_connect" is virtually always present, just use
|
||||
# "connect" event with once_unless_exception in all cases so that
|
||||
# the connection event flow is consistent in all cases.
|
||||
event.listen(
|
||||
pool, "connect", first_connect, _once_unless_exception=True
|
||||
)
|
||||
|
||||
dialect_cls.engine_created(engine)
|
||||
if entrypoint is not dialect_cls:
|
||||
entrypoint.engine_created(engine)
|
||||
|
||||
for plugin in plugins:
|
||||
plugin.engine_created(engine)
|
||||
|
||||
return engine
|
||||
|
||||
|
||||
def engine_from_config(configuration, prefix="sqlalchemy.", **kwargs):
    """Create a new Engine instance using a configuration dictionary.

    The dictionary is typically produced from a config file.

    The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
    ``sqlalchemy.url``, ``sqlalchemy.echo``, etc.  The 'prefix' argument
    indicates the prefix to be searched for.  Each matching key (after the
    prefix is stripped) is treated as though it were the corresponding keyword
    argument to a :func:`_sa.create_engine` call.

    The only required key is (assuming the default prefix) ``sqlalchemy.url``,
    which provides the :ref:`database URL <database_urls>`.

    A select set of keyword arguments will be "coerced" to their
    expected type based on string values.  The set of arguments
    is extensible per-dialect using the ``engine_config_types`` accessor.

    :param configuration: A dictionary (typically produced from a config file,
        but this is not a requirement).  Items whose keys start with the value
        of 'prefix' will have that prefix stripped, and will then be passed to
        :func:`_sa.create_engine`.

    :param prefix: Prefix to match and then strip from keys
        in 'configuration'.

    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
        overrides the corresponding item taken from the 'configuration'
        dictionary.  Keyword arguments should *not* be prefixed.

    :raises KeyError: if no ``url`` key is present after prefix stripping
        and keyword merging.

    """

    # dict comprehension is the idiomatic spelling of the original
    # dict(<generator>) construction; behavior is identical.
    options = {
        key[len(prefix) :]: configuration[key]
        for key in configuration
        if key.startswith(prefix)
    }
    # signal create_engine() to coerce string-valued config entries
    # (e.g. "echo", "pool_recycle") to their expected types.
    options["_coerce_config"] = True
    # direct keyword arguments override configuration-file values
    options.update(kwargs)
    # a missing "url" raises KeyError here; this is the established contract
    url = options.pop("url")
    return create_engine(url, **options)
||||
1942
lib/sqlalchemy/engine/cursor.py
Normal file
1942
lib/sqlalchemy/engine/cursor.py
Normal file
File diff suppressed because it is too large
Load Diff
1936
lib/sqlalchemy/engine/default.py
Normal file
1936
lib/sqlalchemy/engine/default.py
Normal file
File diff suppressed because it is too large
Load Diff
835
lib/sqlalchemy/engine/events.py
Normal file
835
lib/sqlalchemy/engine/events.py
Normal file
@@ -0,0 +1,835 @@
|
||||
# sqlalchemy/engine/events.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
|
||||
from .base import Engine
|
||||
from .interfaces import Connectable
|
||||
from .interfaces import Dialect
|
||||
from .. import event
|
||||
from .. import exc
|
||||
|
||||
|
||||
class ConnectionEvents(event.Events):
|
||||
"""Available events for :class:`.Connectable`, which includes
|
||||
:class:`_engine.Connection` and :class:`_engine.Engine`.
|
||||
|
||||
The methods here define the name of an event as well as the names of
|
||||
members that are passed to listener functions.
|
||||
|
||||
An event listener can be associated with any :class:`.Connectable`
|
||||
class or instance, such as an :class:`_engine.Engine`, e.g.::
|
||||
|
||||
from sqlalchemy import event, create_engine
|
||||
|
||||
def before_cursor_execute(conn, cursor, statement, parameters, context,
|
||||
executemany):
|
||||
log.info("Received statement: %s", statement)
|
||||
|
||||
engine = create_engine('postgresql://scott:tiger@localhost/test')
|
||||
event.listen(engine, "before_cursor_execute", before_cursor_execute)
|
||||
|
||||
or with a specific :class:`_engine.Connection`::
|
||||
|
||||
with engine.begin() as conn:
|
||||
@event.listens_for(conn, 'before_cursor_execute')
|
||||
def before_cursor_execute(conn, cursor, statement, parameters,
|
||||
context, executemany):
|
||||
log.info("Received statement: %s", statement)
|
||||
|
||||
When the methods are called with a `statement` parameter, such as in
|
||||
:meth:`.after_cursor_execute` or :meth:`.before_cursor_execute`,
|
||||
the statement is the exact SQL string that was prepared for transmission
|
||||
to the DBAPI ``cursor`` in the connection's :class:`.Dialect`.
|
||||
|
||||
The :meth:`.before_execute` and :meth:`.before_cursor_execute`
|
||||
events can also be established with the ``retval=True`` flag, which
|
||||
allows modification of the statement and parameters to be sent
|
||||
to the database. The :meth:`.before_cursor_execute` event is
|
||||
particularly useful here to add ad-hoc string transformations, such
|
||||
as comments, to all executions::
|
||||
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy import event
|
||||
|
||||
@event.listens_for(Engine, "before_cursor_execute", retval=True)
|
||||
def comment_sql_calls(conn, cursor, statement, parameters,
|
||||
context, executemany):
|
||||
statement = statement + " -- some comment"
|
||||
return statement, parameters
|
||||
|
||||
.. note:: :class:`_events.ConnectionEvents` can be established on any
|
||||
combination of :class:`_engine.Engine`, :class:`_engine.Connection`,
|
||||
as well
|
||||
as instances of each of those classes. Events across all
|
||||
four scopes will fire off for a given instance of
|
||||
:class:`_engine.Connection`. However, for performance reasons, the
|
||||
:class:`_engine.Connection` object determines at instantiation time
|
||||
whether or not its parent :class:`_engine.Engine` has event listeners
|
||||
established. Event listeners added to the :class:`_engine.Engine`
|
||||
class or to an instance of :class:`_engine.Engine`
|
||||
*after* the instantiation
|
||||
of a dependent :class:`_engine.Connection` instance will usually
|
||||
*not* be available on that :class:`_engine.Connection` instance.
|
||||
The newly
|
||||
added listeners will instead take effect for
|
||||
:class:`_engine.Connection`
|
||||
instances created subsequent to those event listeners being
|
||||
established on the parent :class:`_engine.Engine` class or instance.
|
||||
|
||||
:param retval=False: Applies to the :meth:`.before_execute` and
|
||||
:meth:`.before_cursor_execute` events only. When True, the
|
||||
user-defined event function must have a return value, which
|
||||
is a tuple of parameters that replace the given statement
|
||||
and parameters. See those methods for a description of
|
||||
specific return arguments.
|
||||
|
||||
"""
|
||||
|
||||
_target_class_doc = "SomeEngine"
|
||||
_dispatch_target = Connectable
|
||||
|
||||
    @classmethod
    def _listen(cls, event_key, retval=False):
        """Hook invoked by the event system when a listener is attached.

        When ``retval`` is False (the default), listeners for the
        ``before_execute`` and ``before_cursor_execute`` events are wrapped
        so that the original statement/parameter values are still returned
        to the event chain, preserving the retval-chaining protocol without
        requiring the user function to return anything.
        """
        target, identifier, fn = (
            event_key.dispatch_target,
            event_key.identifier,
            event_key._listen_fn,
        )

        # flag the target so that Connection objects created subsequently
        # know that event dispatch is required
        target._has_events = True

        if not retval:
            if identifier == "before_execute":
                orig_fn = fn

                def wrap_before_execute(
                    conn, clauseelement, multiparams, params, execution_options
                ):
                    orig_fn(
                        conn,
                        clauseelement,
                        multiparams,
                        params,
                        execution_options,
                    )
                    # pass the unmodified values along the event chain
                    return clauseelement, multiparams, params

                fn = wrap_before_execute
            elif identifier == "before_cursor_execute":
                orig_fn = fn

                def wrap_before_cursor_execute(
                    conn, cursor, statement, parameters, context, executemany
                ):
                    orig_fn(
                        conn,
                        cursor,
                        statement,
                        parameters,
                        context,
                        executemany,
                    )
                    # pass the unmodified values along the event chain
                    return statement, parameters

                fn = wrap_before_cursor_execute
        elif retval and identifier not in (
            "before_execute",
            "before_cursor_execute",
            "handle_error",
        ):
            # retval=True is only meaningful for events whose return value
            # is consumed; reject it elsewhere to avoid silent no-ops
            raise exc.ArgumentError(
                "Only the 'before_execute', "
                "'before_cursor_execute' and 'handle_error' engine "
                "event listeners accept the 'retval=True' "
                "argument."
            )
        event_key.with_wrapper(fn).base_listen()
|
||||
|
||||
    @event._legacy_signature(
        "1.4",
        ["conn", "clauseelement", "multiparams", "params"],
        lambda conn, clauseelement, multiparams, params, execution_options: (
            conn,
            clauseelement,
            multiparams,
            params,
        ),
    )
    def before_execute(
        self, conn, clauseelement, multiparams, params, execution_options
    ):
        """Intercept high level execute() events, receiving uncompiled
        SQL constructs and other objects prior to rendering into SQL.

        This event is good for debugging SQL compilation issues as well
        as early manipulation of the parameters being sent to the database,
        as the parameter lists will be in a consistent format here.

        This event can be optionally established with the ``retval=True``
        flag.  The ``clauseelement``, ``multiparams``, and ``params``
        arguments should be returned as a three-tuple in this case::

            @event.listens_for(Engine, "before_execute", retval=True)
            def before_execute(conn, clauseelement, multiparams, params):
                # do something with clauseelement, multiparams, params
                return clauseelement, multiparams, params

        :param conn: :class:`_engine.Connection` object
        :param clauseelement: SQL expression construct, :class:`.Compiled`
            instance, or string statement passed to
            :meth:`_engine.Connection.execute`.
        :param multiparams: Multiple parameter sets, a list of dictionaries.
        :param params: Single parameter set, a single dictionary.
        :param execution_options: dictionary of execution
            options passed along with the statement, if any. This is a merge
            of all options that will be used, including those of the statement,
            the connection, and those passed in to the method itself for
            the 2.0 style of execution.

            .. versionadded:: 1.4

        .. seealso::

            :meth:`.before_cursor_execute`

        """
|
||||
|
||||
    @event._legacy_signature(
        "1.4",
        ["conn", "clauseelement", "multiparams", "params", "result"],
        lambda conn, clauseelement, multiparams, params, execution_options, result: (  # noqa
            conn,
            clauseelement,
            multiparams,
            params,
            result,
        ),
    )
    def after_execute(
        self,
        conn,
        clauseelement,
        multiparams,
        params,
        execution_options,
        result,
    ):
        """Intercept high level execute() events after execute.


        :param conn: :class:`_engine.Connection` object
        :param clauseelement: SQL expression construct, :class:`.Compiled`
            instance, or string statement passed to
            :meth:`_engine.Connection.execute`.
        :param multiparams: Multiple parameter sets, a list of dictionaries.
        :param params: Single parameter set, a single dictionary.
        :param execution_options: dictionary of execution
            options passed along with the statement, if any. This is a merge
            of all options that will be used, including those of the statement,
            the connection, and those passed in to the method itself for
            the 2.0 style of execution.

            .. versionadded:: 1.4

        :param result: :class:`_engine.CursorResult` generated by the
            execution.

        """
|
||||
|
||||
    def before_cursor_execute(
        self, conn, cursor, statement, parameters, context, executemany
    ):
        """Intercept low-level cursor execute() events before execution,
        receiving the string SQL statement and DBAPI-specific parameter list to
        be invoked against a cursor.

        This event is a good choice for logging as well as late modifications
        to the SQL string.  It's less ideal for parameter modifications except
        for those which are specific to a target backend.

        This event can be optionally established with the ``retval=True``
        flag.  The ``statement`` and ``parameters`` arguments should be
        returned as a two-tuple in this case::

            @event.listens_for(Engine, "before_cursor_execute", retval=True)
            def before_cursor_execute(conn, cursor, statement,
                    parameters, context, executemany):
                # do something with statement, parameters
                return statement, parameters

        See the example at :class:`_events.ConnectionEvents`.

        :param conn: :class:`_engine.Connection` object
        :param cursor: DBAPI cursor object
        :param statement: string SQL statement, as to be passed to the DBAPI
        :param parameters: Dictionary, tuple, or list of parameters being
            passed to the ``execute()`` or ``executemany()`` method of the
            DBAPI ``cursor``.  In some cases may be ``None``.
        :param context: :class:`.ExecutionContext` object in use.  May
            be ``None``.
        :param executemany: boolean, if ``True``, this is an ``executemany()``
            call, if ``False``, this is an ``execute()`` call.

        .. seealso::

            :meth:`.before_execute`

            :meth:`.after_cursor_execute`

        """
|
||||
|
||||
    def after_cursor_execute(
        self, conn, cursor, statement, parameters, context, executemany
    ):
        """Intercept low-level cursor execute() events after execution.

        :param conn: :class:`_engine.Connection` object
        :param cursor: DBAPI cursor object.  Will have results pending
            if the statement was a SELECT, but these should not be consumed
            as they will be needed by the :class:`_engine.CursorResult`.
        :param statement: string SQL statement, as passed to the DBAPI
        :param parameters: Dictionary, tuple, or list of parameters being
            passed to the ``execute()`` or ``executemany()`` method of the
            DBAPI ``cursor``.  In some cases may be ``None``.
        :param context: :class:`.ExecutionContext` object in use.  May
            be ``None``.
        :param executemany: boolean, if ``True``, this is an ``executemany()``
            call, if ``False``, this is an ``execute()`` call.

        """
|
||||
|
||||
    def handle_error(self, exception_context):
        r"""Intercept all exceptions processed by the
        :class:`_engine.Connection`.

        This includes all exceptions emitted by the DBAPI as well as
        within SQLAlchemy's statement invocation process, including
        encoding errors and other statement validation errors.  Other areas
        in which the event is invoked include transaction begin and end,
        result row fetching, cursor creation.

        Note that :meth:`.handle_error` may support new kinds of exceptions
        and new calling scenarios at *any time*.  Code which uses this
        event must expect new calling patterns to be present in minor
        releases.

        To support the wide variety of members that correspond to an exception,
        as well as to allow extensibility of the event without backwards
        incompatibility, the sole argument received is an instance of
        :class:`.ExceptionContext`.   This object contains data members
        representing detail about the exception.

        Use cases supported by this hook include:

        * read-only, low-level exception handling for logging and
          debugging purposes
        * exception re-writing
        * Establishing or disabling whether a connection or the owning
          connection pool is invalidated or expired in response to a
          specific exception [1]_.

        The hook is called while the cursor from the failed operation
        (if any) is still open and accessible.   Special cleanup operations
        can be called on this cursor; SQLAlchemy will attempt to close
        this cursor subsequent to this hook being invoked.  If the connection
        is in "autocommit" mode, the transaction also remains open within
        the scope of this hook; the rollback of the per-statement transaction
        also occurs after the hook is called.

        .. note::

            .. [1] The pool "pre_ping" handler enabled using the
                :paramref:`_sa.create_engine.pool_pre_ping` parameter does
                **not** consult this event before deciding if the "ping"
                returned false, as opposed to receiving an unhandled error.
                For this use case, the :ref:`legacy recipe based on
                engine_connect() may be used
                <pool_disconnects_pessimistic_custom>`.  A future API will
                allow more comprehensive customization of the "disconnect"
                detection mechanism across all functions.

        A handler function has two options for replacing
        the SQLAlchemy-constructed exception into one that is user
        defined.   It can either raise this new exception directly, in
        which case all further event listeners are bypassed and the
        exception will be raised, after appropriate cleanup as taken
        place::

            @event.listens_for(Engine, "handle_error")
            def handle_exception(context):
                if isinstance(context.original_exception,
                    psycopg2.OperationalError) and \
                    "failed" in str(context.original_exception):
                    raise MySpecialException("failed operation")

        .. warning::  Because the
            :meth:`_events.ConnectionEvents.handle_error`
            event specifically provides for exceptions to be re-thrown as
            the ultimate exception raised by the failed statement,
            **stack traces will be misleading** if the user-defined event
            handler itself fails and throws an unexpected exception;
            the stack trace may not illustrate the actual code line that
            failed!  It is advised to code carefully here and use
            logging and/or inline debugging if unexpected exceptions are
            occurring.

        Alternatively, a "chained" style of event handling can be
        used, by configuring the handler with the ``retval=True``
        modifier and returning the new exception instance from the
        function.  In this case, event handling will continue onto the
        next handler.   The "chained" exception is available using
        :attr:`.ExceptionContext.chained_exception`::

            @event.listens_for(Engine, "handle_error", retval=True)
            def handle_exception(context):
                if context.chained_exception is not None and \
                    "special" in context.chained_exception.message:
                    return MySpecialException("failed",
                        cause=context.chained_exception)

        Handlers that return ``None`` may be used within the chain; when
        a handler returns ``None``, the previous exception instance,
        if any, is maintained as the current exception that is passed onto the
        next handler.

        When a custom exception is raised or returned, SQLAlchemy raises
        this new exception as-is, it is not wrapped by any SQLAlchemy
        object.  If the exception is not a subclass of
        :class:`sqlalchemy.exc.StatementError`,
        certain features may not be available; currently this includes
        the ORM's feature of adding a detail hint about "autoflush" to
        exceptions raised within the autoflush process.

        :param context: an :class:`.ExceptionContext` object.   See this
            class for details on all available members.

        .. versionadded:: 0.9.7 Added the
            :meth:`_events.ConnectionEvents.handle_error` hook.

        .. versionchanged:: 1.1 The :meth:`.handle_error` event will now
            receive all exceptions that inherit from ``BaseException``,
            including ``SystemExit`` and ``KeyboardInterrupt``.  The setting for
            :attr:`.ExceptionContext.is_disconnect` is ``True`` in this case and
            the default for
            :attr:`.ExceptionContext.invalidate_pool_on_disconnect` is
            ``False``.

        .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is now
            invoked when an :class:`_engine.Engine` fails during the initial
            call to :meth:`_engine.Engine.connect`, as well as when a
            :class:`_engine.Connection` object encounters an error during a
            reconnect operation.

        .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is
            not fired off when a dialect makes use of the
            ``skip_user_error_events`` execution option.   This is used
            by dialects which intend to catch SQLAlchemy-specific exceptions
            within specific operations, such as when the MySQL dialect detects
            a table not present within the ``has_table()`` dialect method.
            Prior to 1.0.0, code which implements :meth:`.handle_error` needs
            to ensure that exceptions thrown in these scenarios are re-raised
            without modification.

        """
|
||||
|
||||
    def engine_connect(self, conn, branch):
        """Intercept the creation of a new :class:`_engine.Connection`.

        This event is called typically as the direct result of calling
        the :meth:`_engine.Engine.connect` method.

        It differs from the :meth:`_events.PoolEvents.connect` method, which
        refers to the actual connection to a database at the DBAPI level;
        a DBAPI connection may be pooled and reused for many operations.
        In contrast, this event refers only to the production of a higher level
        :class:`_engine.Connection` wrapper around such a DBAPI connection.

        It also differs from the :meth:`_events.PoolEvents.checkout` event
        in that it is specific to the :class:`_engine.Connection` object,
        not the
        DBAPI connection that :meth:`_events.PoolEvents.checkout` deals with,
        although
        this DBAPI connection is available here via the
        :attr:`_engine.Connection.connection` attribute.
        But note there can in fact
        be multiple :meth:`_events.PoolEvents.checkout`
        events within the lifespan
        of a single :class:`_engine.Connection` object, if that
        :class:`_engine.Connection`
        is invalidated and re-established.   There can also be multiple
        :class:`_engine.Connection`
        objects generated for the same already-checked-out
        DBAPI connection, in the case that a "branch" of a
        :class:`_engine.Connection`
        is produced.

        :param conn: :class:`_engine.Connection` object.
        :param branch: if True, this is a "branch" of an existing
            :class:`_engine.Connection`.  A branch is generated within the
            course of a statement execution to invoke supplemental statements,
            most typically to pre-execute a SELECT of a default value for the
            purposes of an INSERT statement.

        .. seealso::

            :meth:`_events.PoolEvents.checkout`
            the lower-level pool checkout event
            for an individual DBAPI connection

        """
|
||||
|
||||
    def set_connection_execution_options(self, conn, opts):
        """Intercept when the :meth:`_engine.Connection.execution_options`
        method is called.

        This method is called after the new :class:`_engine.Connection`
        has been
        produced, with the newly updated execution options collection, but
        before the :class:`.Dialect` has acted upon any of those new options.

        Note that this method is not called when a new
        :class:`_engine.Connection`
        is produced which is inheriting execution options from its parent
        :class:`_engine.Engine`; to intercept this condition, use the
        :meth:`_events.ConnectionEvents.engine_connect` event.

        :param conn: The newly copied :class:`_engine.Connection` object

        :param opts: dictionary of options that were passed to the
            :meth:`_engine.Connection.execution_options` method.

        .. versionadded:: 0.9.0

        .. seealso::

            :meth:`_events.ConnectionEvents.set_engine_execution_options`
            - event
            which is called when :meth:`_engine.Engine.execution_options`
            is called.


        """
|
||||
|
||||
    def set_engine_execution_options(self, engine, opts):
        """Intercept when the :meth:`_engine.Engine.execution_options`
        method is called.

        The :meth:`_engine.Engine.execution_options` method produces a shallow
        copy of the :class:`_engine.Engine` which stores the new options.
        That new
        :class:`_engine.Engine` is passed here.
        A particular application of this
        method is to add a :meth:`_events.ConnectionEvents.engine_connect`
        event
        handler to the given :class:`_engine.Engine`
        which will perform some per-
        :class:`_engine.Connection` task specific to these execution options.

        :param engine: The newly copied :class:`_engine.Engine` object

        :param opts: dictionary of options that were passed to the
            :meth:`_engine.Engine.execution_options` method.

        .. versionadded:: 0.9.0

        .. seealso::

            :meth:`_events.ConnectionEvents.set_connection_execution_options`
            - event
            which is called when :meth:`_engine.Connection.execution_options`
            is
            called.

        """
|
||||
|
||||
    def engine_disposed(self, engine):
        """Intercept when the :meth:`_engine.Engine.dispose` method is called.

        The :meth:`_engine.Engine.dispose` method instructs the engine to
        "dispose" of its connection pool (e.g. :class:`_pool.Pool`), and
        replaces it with a new one.  Disposing of the old pool has the
        effect that existing checked-in connections are closed.  The new
        pool does not establish any new connections until it is first used.

        This event can be used to indicate that resources related to the
        :class:`_engine.Engine` should also be cleaned up,
        keeping in mind that the
        :class:`_engine.Engine`
        can still be used for new requests in which case
        it re-acquires connection resources.

        .. versionadded:: 1.0.5

        """
|
||||
|
||||
    def begin(self, conn):
        """Intercept begin() events.

        :param conn: :class:`_engine.Connection` object

        """
|
||||
|
||||
    def rollback(self, conn):
        """Intercept rollback() events, as initiated by a
        :class:`.Transaction`.

        Note that the :class:`_pool.Pool` also "auto-rolls back"
        a DBAPI connection upon checkin, if the ``reset_on_return``
        flag is set to its default value of ``'rollback'``.
        To intercept this
        rollback, use the :meth:`_events.PoolEvents.reset` hook.

        :param conn: :class:`_engine.Connection` object

        .. seealso::

            :meth:`_events.PoolEvents.reset`

        """
|
||||
|
||||
    def commit(self, conn):
        """Intercept commit() events, as initiated by a
        :class:`.Transaction`.

        Note that the :class:`_pool.Pool` may also "auto-commit"
        a DBAPI connection upon checkin, if the ``reset_on_return``
        flag is set to the value ``'commit'``.  To intercept this
        commit, use the :meth:`_events.PoolEvents.reset` hook.

        :param conn: :class:`_engine.Connection` object
        """
|
||||
|
||||
    def savepoint(self, conn, name):
        """Intercept savepoint() events.

        :param conn: :class:`_engine.Connection` object
        :param name: specified name used for the savepoint.

        """
|
||||
|
||||
    def rollback_savepoint(self, conn, name, context):
        """Intercept rollback_savepoint() events.

        :param conn: :class:`_engine.Connection` object
        :param name: specified name used for the savepoint.
        :param context: not used

        """
        # TODO: deprecate "context"
|
||||
|
||||
    def release_savepoint(self, conn, name, context):
        """Intercept release_savepoint() events.

        :param conn: :class:`_engine.Connection` object
        :param name: specified name used for the savepoint.
        :param context: not used

        """
        # TODO: deprecate "context"
|
||||
|
||||
    def begin_twophase(self, conn, xid):
        """Intercept begin_twophase() events.

        :param conn: :class:`_engine.Connection` object
        :param xid: two-phase XID identifier

        """
|
||||
|
||||
    def prepare_twophase(self, conn, xid):
        """Intercept prepare_twophase() events.

        :param conn: :class:`_engine.Connection` object
        :param xid: two-phase XID identifier
        """
|
||||
|
||||
    def rollback_twophase(self, conn, xid, is_prepared):
        """Intercept rollback_twophase() events.

        :param conn: :class:`_engine.Connection` object
        :param xid: two-phase XID identifier
        :param is_prepared: boolean, indicates if
            :meth:`.TwoPhaseTransaction.prepare` was called.

        """
|
||||
|
||||
def commit_twophase(self, conn, xid, is_prepared):
    """Event hook fired when a two-phase transaction is committed.

    This is a no-op stub; registered listeners provide the actual
    behavior.

    :param conn: the :class:`_engine.Connection` in use
    :param xid: the two-phase XID identifier
    :param is_prepared: boolean, True if
     :meth:`.TwoPhaseTransaction.prepare` was called.

    """
||||
class DialectEvents(event.Events):
    """event interface for execution-replacement functions.

    These events allow direct instrumentation and replacement
    of key dialect functions which interact with the DBAPI.

    .. note::

        :class:`.DialectEvents` hooks should be considered **semi-public**
        and experimental.
        These hooks are not for general use and are only for those situations
        where intricate re-statement of DBAPI mechanics must be injected onto
        an existing dialect.  For general-use statement-interception events,
        please use the :class:`_events.ConnectionEvents` interface.

    .. seealso::

        :meth:`_events.ConnectionEvents.before_cursor_execute`

        :meth:`_events.ConnectionEvents.before_execute`

        :meth:`_events.ConnectionEvents.after_cursor_execute`

        :meth:`_events.ConnectionEvents.after_execute`


    .. versionadded:: 0.9.4

    """

    _target_class_doc = "SomeEngine"
    _dispatch_target = Dialect

    @classmethod
    def _listen(cls, event_key, retval=False):
        # "retval" is accepted for signature compatibility but is unused
        # in this body.
        target = event_key.dispatch_target

        # mark the dialect so the execution path knows listeners are
        # present (NOTE(review): the consumer of _has_events is not
        # visible in this module -- presumably checked before dispatch)
        target._has_events = True
        event_key.base_listen()

    @classmethod
    def _accept_with(cls, target):
        # Resolve the listen() target down to a Dialect class or instance;
        # listening on an Engine (class or instance) attaches to its dialect.
        if isinstance(target, type):
            if issubclass(target, Engine):
                return Dialect
            elif issubclass(target, Dialect):
                return target
        elif isinstance(target, Engine):
            return target.dialect
        elif isinstance(target, Dialect):
            return target
        elif hasattr(target, "dispatch") and hasattr(
            target.dispatch._events, "_no_async_engine_events"
        ):
            # async engine facades declare _no_async_engine_events;
            # NOTE(review): presumably raises an informative error --
            # no return value is propagated from this branch
            target.dispatch._events._no_async_engine_events()
        else:
            # not a recognized target; returning None declines the target
            return None

    def do_connect(self, dialect, conn_rec, cargs, cparams):
        """Receive connection arguments before a connection is made.

        This event is useful in that it allows the handler to manipulate the
        cargs and/or cparams collections that control how the DBAPI
        ``connect()`` function will be called. ``cargs`` will always be a
        Python list that can be mutated in-place, and ``cparams`` a Python
        dictionary that may also be mutated::

            e = create_engine("postgresql+psycopg2://user@host/dbname")

            @event.listens_for(e, 'do_connect')
            def receive_do_connect(dialect, conn_rec, cargs, cparams):
                cparams["password"] = "some_password"

        The event hook may also be used to override the call to ``connect()``
        entirely, by returning a non-``None`` DBAPI connection object::

            e = create_engine("postgresql+psycopg2://user@host/dbname")

            @event.listens_for(e, 'do_connect')
            def receive_do_connect(dialect, conn_rec, cargs, cparams):
                return psycopg2.connect(*cargs, **cparams)


        .. versionadded:: 1.0.3

        .. seealso::

            :ref:`custom_dbapi_args`

        """

    def do_executemany(self, cursor, statement, parameters, context):
        """Receive a cursor to have executemany() called.

        Return the value True to halt further events from invoking,
        and to indicate that the cursor execution has already taken
        place within the event handler.

        """

    def do_execute_no_params(self, cursor, statement, context):
        """Receive a cursor to have execute() with no parameters called.

        Return the value True to halt further events from invoking,
        and to indicate that the cursor execution has already taken
        place within the event handler.

        """

    def do_execute(self, cursor, statement, parameters, context):
        """Receive a cursor to have execute() called.

        Return the value True to halt further events from invoking,
        and to indicate that the cursor execution has already taken
        place within the event handler.

        """

    def do_setinputsizes(
        self, inputsizes, cursor, statement, parameters, context
    ):
        """Receive the setinputsizes dictionary for possible modification.

        This event is emitted in the case where the dialect makes use of the
        DBAPI ``cursor.setinputsizes()`` method which passes information about
        parameter binding for a particular statement.   The given
        ``inputsizes`` dictionary will contain :class:`.BindParameter` objects
        as keys, linked to DBAPI-specific type objects as values; for
        parameters that are not bound, they are added to the dictionary with
        ``None`` as the value, which means the parameter will not be included
        in the ultimate setinputsizes call.   The event may be used to inspect
        and/or log the datatypes that are being bound, as well as to modify the
        dictionary in place.  Parameters can be added, modified, or removed
        from this dictionary.   Callers will typically want to inspect the
        :attr:`.BindParameter.type` attribute of the given bind objects in
        order to make decisions about the DBAPI object.

        After the event, the ``inputsizes`` dictionary is converted into
        an appropriate datastructure to be passed to ``cursor.setinputsizes``;
        either a list for a positional bound parameter execution style,
        or a dictionary of string parameter keys to DBAPI type objects for
        a named bound parameter execution style.

        The setinputsizes hook overall is only used for dialects which include
        the flag ``use_setinputsizes=True``.  Dialects which use this
        include cx_Oracle, pg8000, asyncpg, and pyodbc dialects.

        .. note::

            For use with pyodbc, the ``use_setinputsizes`` flag
            must be passed to the dialect, e.g.::

                create_engine("mssql+pyodbc://...", use_setinputsizes=True)

            .. seealso::

                  :ref:`mssql_pyodbc_setinputsizes`

        .. versionadded:: 1.2.9

        .. seealso::

            :ref:`cx_oracle_setinputsizes`

        """
        pass
|
||||
1719
lib/sqlalchemy/engine/interfaces.py
Normal file
1719
lib/sqlalchemy/engine/interfaces.py
Normal file
File diff suppressed because it is too large
Load Diff
118
lib/sqlalchemy/engine/mock.py
Normal file
118
lib/sqlalchemy/engine/mock.py
Normal file
@@ -0,0 +1,118 @@
|
||||
# engine/mock.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from operator import attrgetter
|
||||
|
||||
from . import base
|
||||
from . import url as _url
|
||||
from .. import util
|
||||
from ..sql import ddl
|
||||
|
||||
|
||||
class MockConnection(base.Connectable):
    """A :class:`.Connectable` that forwards all statement execution to a
    user-supplied ``executor`` callable rather than a real DBAPI connection.

    Instances are produced by :func:`.create_mock_engine` and are useful
    for echoing DDL without a database.
    """

    def __init__(self, dialect, execute):
        self._dialect = dialect
        # NOTE: this instance attribute intentionally shadows the
        # ``execute()`` method defined below, which exists only to raise
        # NotImplementedError if no executor was installed.
        self.execute = execute

    # act as our own "engine": engine/dialect/name delegate to the
    # wrapped dialect
    engine = property(lambda s: s)
    dialect = property(attrgetter("_dialect"))
    name = property(lambda s: s._dialect.name)

    def schema_for_object(self, obj):
        """Return the schema of the given schema item (no translation)."""
        return obj.schema

    def connect(self, **kwargs):
        # a mock "engine" is its own connection
        return self

    def execution_options(self, **kw):
        # execution options are ignored; return self unchanged
        return self

    def compiler(self, statement, parameters, **kwargs):
        """Compile the given statement via the wrapped dialect."""
        return self._dialect.compiler(
            statement, parameters, engine=self, **kwargs
        )

    def create(self, entity, **kwargs):
        """Emit CREATE DDL for the given entity via the executor."""
        kwargs["checkfirst"] = False

        ddl.SchemaGenerator(self.dialect, self, **kwargs).traverse_single(
            entity
        )

    def drop(self, entity, **kwargs):
        """Emit DROP DDL for the given entity via the executor."""
        kwargs["checkfirst"] = False

        ddl.SchemaDropper(self.dialect, self, **kwargs).traverse_single(entity)

    def _run_ddl_visitor(
        self, visitorcallable, element, connection=None, **kwargs
    ):
        # "checkfirst" is forced off: there is no database to check against
        kwargs["checkfirst"] = False
        visitorcallable(self.dialect, self, **kwargs).traverse_single(element)

    def execute(self, object_, *multiparams, **params):
        # shadowed by the instance attribute assigned in __init__; only
        # reachable if construction did not install an executor
        raise NotImplementedError()
||||
|
||||
def create_mock_engine(url, executor, **kw):
    """Create a "mock" engine used for echoing DDL.

    This is a utility function used for debugging or storing the output of DDL
    sequences as generated by :meth:`_schema.MetaData.create_all`
    and related methods.

    The function accepts a URL which is used only to determine the kind of
    dialect to be used, as well as an "executor" callable function which
    will receive a SQL expression object and parameters, which can then be
    echoed or otherwise printed.   The executor's return value is not handled,
    nor does the engine allow regular string statements to be invoked, and
    is therefore only useful for DDL that is sent to the database without
    receiving any results.

    E.g.::

        from sqlalchemy import create_mock_engine

        def dump(sql, *multiparams, **params):
            print(sql.compile(dialect=engine.dialect))

        engine = create_mock_engine('postgresql://', dump)
        metadata.create_all(engine, checkfirst=False)

    :param url: A string URL which typically needs to contain only the
     database backend name.

    :param executor: a callable which receives the arguments ``sql``,
     ``*multiparams`` and ``**params``.  The ``sql`` parameter is typically
     an instance of :class:`.DDLElement`, which can then be compiled into a
     string using :meth:`.DDLElement.compile`.

    :raises: whatever :func:`.make_url` or the dialect constructor raise
     for an unparseable URL or unknown backend.

    .. versionadded:: 1.4 - the :func:`.create_mock_engine` function replaces
       the previous "mock" engine strategy used with
       :func:`_sa.create_engine`.

    .. seealso::

        :ref:`faq_ddl_as_string`

    """

    # create url.URL object
    u = _url.make_url(url)

    dialect_cls = u.get_dialect()

    dialect_args = {}
    # consume dialect arguments from kwargs; remaining kw entries are
    # silently ignored
    for k in util.get_cls_kwargs(dialect_cls):
        if k in kw:
            dialect_args[k] = kw.pop(k)

    # create dialect
    dialect = dialect_cls(**dialect_args)

    return MockConnection(dialect, executor)
|
||||
1160
lib/sqlalchemy/engine/reflection.py
Normal file
1160
lib/sqlalchemy/engine/reflection.py
Normal file
File diff suppressed because it is too large
Load Diff
1857
lib/sqlalchemy/engine/result.py
Normal file
1857
lib/sqlalchemy/engine/result.py
Normal file
File diff suppressed because it is too large
Load Diff
621
lib/sqlalchemy/engine/row.py
Normal file
621
lib/sqlalchemy/engine/row.py
Normal file
@@ -0,0 +1,621 @@
|
||||
# engine/row.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Define row constructs including :class:`.Row`."""
|
||||
|
||||
|
||||
import operator
|
||||
|
||||
from .. import util
|
||||
from ..sql import util as sql_util
|
||||
from ..util.compat import collections_abc
|
||||
|
||||
MD_INDEX = 0 # integer index in cursor.description
|
||||
|
||||
# This reconstructor is necessary so that pickles with the C extension or
# without use the same Binary format.
try:
    # We need a different reconstructor on the C extension so that we can
    # add extra checks that fields have correctly been initialized by
    # __setstate__.
    from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor

    # The extra function embedding is needed so that the
    # reconstructor function has the same signature whether or not
    # the extension is present.
    def rowproxy_reconstructor(cls, state):
        return safe_rowproxy_reconstructor(cls, state)


except ImportError:

    # pure-Python fallback: reconstruct without the C-level field checks
    def rowproxy_reconstructor(cls, state):
        obj = cls.__new__(cls)
        obj.__setstate__(state)
        return obj
||||
|
||||
# Key-access styles consumed by BaseRow.__getitem__ /
# BaseRow._get_by_key_impl_mapping; stored per-row as _key_style.
KEY_INTEGER_ONLY = 0
"""__getitem__ only allows integer values, raises TypeError otherwise"""

KEY_OBJECTS_ONLY = 1
"""__getitem__ only allows string/object values, raises TypeError otherwise"""

KEY_OBJECTS_BUT_WARN = 2
"""__getitem__ allows integer or string/object values, but emits a 2.0
deprecation warning if string/object is passed"""

KEY_OBJECTS_NO_WARN = 3
"""__getitem__ allows integer or string/object values with no warnings
or errors."""
|
||||
try:
    from sqlalchemy.cresultproxy import BaseRow

    _baserow_usecext = True
except ImportError:
    _baserow_usecext = False

    # pure-Python fallback implementation of the row base class; the C
    # extension version above is preferred when available
    class BaseRow(object):
        __slots__ = ("_parent", "_data", "_keymap", "_key_style")

        def __init__(self, parent, processors, keymap, key_style, data):
            """Row objects are constructed by CursorResult objects."""

            # object.__setattr__ is used throughout because Row subclasses
            # block normal attribute assignment via __setattr__
            object.__setattr__(self, "_parent", parent)

            if processors:
                # apply per-column result processors while materializing
                # the tuple of values
                object.__setattr__(
                    self,
                    "_data",
                    tuple(
                        [
                            proc(value) if proc else value
                            for proc, value in zip(processors, data)
                        ]
                    ),
                )
            else:
                object.__setattr__(self, "_data", tuple(data))

            object.__setattr__(self, "_keymap", keymap)

            object.__setattr__(self, "_key_style", key_style)

        def __reduce__(self):
            # pickle via rowproxy_reconstructor so C-ext and pure-Python
            # builds share the same pickle format
            return (
                rowproxy_reconstructor,
                (self.__class__, self.__getstate__()),
            )

        def _filter_on_values(self, filters):
            # return a new Row with a different set of result processors
            return Row(
                self._parent,
                filters,
                self._keymap,
                self._key_style,
                self._data,
            )

        def _values_impl(self):
            return list(self)

        def __iter__(self):
            return iter(self._data)

        def __len__(self):
            return len(self._data)

        def __hash__(self):
            return hash(self._data)

        def _get_by_int_impl(self, key):
            return self._data[key]

        def _get_by_key_impl(self, key):
            # NOTE(review): "int in key.__class__.__mro__" accepts int and
            # subclasses (including bool) -- presumably chosen over
            # isinstance() for speed on this hot path; confirm
            if int in key.__class__.__mro__:
                return self._data[key]

            if self._key_style == KEY_INTEGER_ONLY:
                self._parent._raise_for_nonint(key)

            # the following is all LegacyRow support. none of this
            # should be called if not LegacyRow
            # assert isinstance(self, LegacyRow)

            try:
                rec = self._keymap[key]
            except KeyError as ke:
                rec = self._parent._key_fallback(key, ke)
            except TypeError:
                # unhashable key: support slicing, otherwise re-raise
                if isinstance(key, slice):
                    return tuple(self._data[key])
                else:
                    raise

            mdindex = rec[MD_INDEX]
            if mdindex is None:
                self._parent._raise_for_ambiguous_column_name(rec)

            elif self._key_style == KEY_OBJECTS_BUT_WARN and mdindex != key:
                self._parent._warn_for_nonint(key)

            return self._data[mdindex]

        # The original 1.4 plan was that Row would not allow row["str"]
        # access, however as the C extensions were inadvertently allowing
        # this coupled with the fact that orm Session sets future=True,
        # this allows a softer upgrade path.  see #6218
        __getitem__ = _get_by_key_impl

        def _get_by_key_impl_mapping(self, key):
            # mapping-style lookup used by RowMapping and __getattr__
            try:
                rec = self._keymap[key]
            except KeyError as ke:
                rec = self._parent._key_fallback(key, ke)

            mdindex = rec[MD_INDEX]
            if mdindex is None:
                self._parent._raise_for_ambiguous_column_name(rec)
            elif (
                self._key_style == KEY_OBJECTS_ONLY
                and int in key.__class__.__mro__
            ):
                # integer keys are rejected in objects-only style
                raise KeyError(key)

            return self._data[mdindex]

        def __getattr__(self, name):
            # attribute access resolves to column lookup; translate a
            # missing key into AttributeError per the attribute protocol
            try:
                return self._get_by_key_impl_mapping(name)
            except KeyError as e:
                util.raise_(AttributeError(e.args[0]), replace_context=e)
|
||||
|
||||
class Row(BaseRow, collections_abc.Sequence):
    """Represent a single result row.

    The :class:`.Row` object represents a row of a database result.  It is
    typically associated in the 1.x series of SQLAlchemy with the
    :class:`_engine.CursorResult` object, however is also used by the ORM for
    tuple-like results as of SQLAlchemy 1.4.

    The :class:`.Row` object seeks to act as much like a Python named
    tuple as possible.   For mapping (i.e. dictionary) behavior on a row,
    such as testing for containment of keys, refer to the :attr:`.Row._mapping`
    attribute.

    .. seealso::

        :ref:`tutorial_selecting_data` - includes examples of selecting
        rows from SELECT statements.

        :class:`.LegacyRow` - Compatibility interface introduced in SQLAlchemy
        1.4.

    .. versionchanged:: 1.4

        Renamed ``RowProxy`` to :class:`.Row`.  :class:`.Row` is no longer a
        "proxy" object in that it contains the final form of data within it,
        and now acts mostly like a named tuple.  Mapping-like functionality is
        moved to the :attr:`.Row._mapping` attribute, but will remain available
        in SQLAlchemy 1.x series via the :class:`.LegacyRow` class that is used
        by :class:`_engine.LegacyCursorResult`.
        See :ref:`change_4710_core` for background
        on this change.

    """

    __slots__ = ()

    # in 2.0, this should be KEY_INTEGER_ONLY
    _default_key_style = KEY_OBJECTS_BUT_WARN

    def __setattr__(self, name, value):
        # rows are immutable; see BaseRow.__init__ for how state is set
        raise AttributeError("can't set attribute")

    def __delattr__(self, name):
        raise AttributeError("can't delete attribute")

    @property
    def _mapping(self):
        """Return a :class:`.RowMapping` for this :class:`.Row`.

        This object provides a consistent Python mapping (i.e. dictionary)
        interface for the data contained within the row.   The :class:`.Row`
        by itself behaves like a named tuple, however in the 1.4 series of
        SQLAlchemy, the :class:`.LegacyRow` class is still used by Core which
        continues to have mapping-like behaviors against the row object
        itself.

        .. seealso::

            :attr:`.Row._fields`

        .. versionadded:: 1.4

        """
        return RowMapping(
            self._parent,
            None,
            self._keymap,
            RowMapping._default_key_style,
            self._data,
        )

    def _special_name_accessor(name):
        """Handle ambiguous names such as "count" and "index" """

        # class-body helper (not a method): builds a property that prefers
        # a column of the given name, falling back to the Sequence method
        @property
        def go(self):
            if self._parent._has_key(name):
                return self.__getattr__(name)
            else:

                def meth(*arg, **kw):
                    return getattr(collections_abc.Sequence, name)(
                        self, *arg, **kw
                    )

                return meth

        return go

    count = _special_name_accessor("count")
    index = _special_name_accessor("index")

    def __contains__(self, key):
        # tuple-like containment: tests values, not keys
        return key in self._data

    def __getstate__(self):
        # _keymap is deliberately omitted; it is restored from _parent
        # in __setstate__
        return {
            "_parent": self._parent,
            "_data": self._data,
            "_key_style": self._key_style,
        }

    def __setstate__(self, state):
        parent = state["_parent"]
        object.__setattr__(self, "_parent", parent)
        object.__setattr__(self, "_data", state["_data"])
        object.__setattr__(self, "_keymap", parent._keymap)
        object.__setattr__(self, "_key_style", state["_key_style"])

    def _op(self, other, op):
        # compare as plain tuples; Row-to-Row comparison converts both sides
        return (
            op(tuple(self), tuple(other))
            if isinstance(other, Row)
            else op(tuple(self), other)
        )

    # defining __eq__ below would otherwise clear the inherited __hash__
    __hash__ = BaseRow.__hash__

    def __lt__(self, other):
        return self._op(other, operator.lt)

    def __le__(self, other):
        return self._op(other, operator.le)

    def __ge__(self, other):
        return self._op(other, operator.ge)

    def __gt__(self, other):
        return self._op(other, operator.gt)

    def __eq__(self, other):
        return self._op(other, operator.eq)

    def __ne__(self, other):
        return self._op(other, operator.ne)

    def __repr__(self):
        return repr(sql_util._repr_row(self))

    @util.deprecated_20(
        ":meth:`.Row.keys`",
        alternative="Use the namedtuple standard accessor "
        ":attr:`.Row._fields`, or for full mapping behavior use "
        "row._mapping.keys() ",
    )
    def keys(self):
        """Return the list of keys as strings represented by this
        :class:`.Row`.

        The keys can represent the labels of the columns returned by a core
        statement or the names of the orm classes returned by an orm
        execution.

        This method is analogous to the Python dictionary ``.keys()`` method,
        except that it returns a list, not an iterator.

        .. seealso::

            :attr:`.Row._fields`

            :attr:`.Row._mapping`

        """
        return self._parent.keys

    @property
    def _fields(self):
        """Return a tuple of string keys as represented by this
        :class:`.Row`.

        The keys can represent the labels of the columns returned by a core
        statement or the names of the orm classes returned by an orm
        execution.

        This attribute is analogous to the Python named tuple ``._fields``
        attribute.

        .. versionadded:: 1.4

        .. seealso::

            :attr:`.Row._mapping`

        """
        return tuple([k for k in self._parent.keys if k is not None])

    def _asdict(self):
        """Return a new dict which maps field names to their corresponding
        values.

        This method is analogous to the Python named tuple ``._asdict()``
        method, and works by applying the ``dict()`` constructor to the
        :attr:`.Row._mapping` attribute.

        .. versionadded:: 1.4

        .. seealso::

            :attr:`.Row._mapping`

        """
        return dict(self._mapping)

    def _replace(self):
        # namedtuple API member not supported for Row
        raise NotImplementedError()

    @property
    def _field_defaults(self):
        # namedtuple API member not supported for Row
        raise NotImplementedError()
||||
|
||||
class LegacyRow(Row):
    """A subclass of :class:`.Row` that delivers 1.x SQLAlchemy behaviors
    for Core.

    The :class:`.LegacyRow` class is where most of the Python mapping
    (i.e. dictionary-like)
    behaviors are implemented for the row object.  The mapping behavior
    of :class:`.Row` going forward is accessible via the :class:`.Row._mapping`
    attribute.

    .. versionadded:: 1.4 - added :class:`.LegacyRow` which encapsulates most
       of the deprecated behaviors of :class:`.Row`.

    """

    __slots__ = ()

    # warn on string access only when 2.0 deprecation warnings are enabled
    if util.SQLALCHEMY_WARN_20:
        _default_key_style = KEY_OBJECTS_BUT_WARN
    else:
        _default_key_style = KEY_OBJECTS_NO_WARN

    def __contains__(self, key):
        # legacy mapping-style containment: tests keys, not values
        return self._parent._contains(key, self)

    # prior to #6218, LegacyRow would redirect the behavior of __getitem__
    # for the non C version of BaseRow.  This is now set up by Python BaseRow
    # in all cases
    # if not _baserow_usecext:
    #    __getitem__ = BaseRow._get_by_key_impl

    @util.deprecated(
        "1.4",
        "The :meth:`.LegacyRow.has_key` method is deprecated and will be "
        "removed in a future release. To test for key membership, use "
        "the :attr:`Row._mapping` attribute, i.e. 'key in row._mapping`.",
    )
    def has_key(self, key):
        """Return True if this :class:`.LegacyRow` contains the given key.

        Through the SQLAlchemy 1.x series, the ``__contains__()`` method of
        :class:`.Row` (or :class:`.LegacyRow` as of SQLAlchemy 1.4) also links
        to :meth:`.Row.has_key`, in that an expression such as ::

            "some_col" in row

        Will return True if the row contains a column named ``"some_col"``,
        in the way that a Python mapping works.

        However, it is planned that the 2.0 series of SQLAlchemy will reverse
        this behavior so that ``__contains__()`` will refer to a value being
        present in the row, in the way that a Python tuple works.

        .. seealso::

            :ref:`change_4710_core`

        """

        return self._parent._has_key(key)

    @util.deprecated(
        "1.4",
        "The :meth:`.LegacyRow.items` method is deprecated and will be "
        "removed in a future release. Use the :attr:`Row._mapping` "
        "attribute, i.e., 'row._mapping.items()'.",
    )
    def items(self):
        """Return a list of tuples, each tuple containing a key/value pair.

        This method is analogous to the Python dictionary ``.items()`` method,
        except that it returns a list, not an iterator.

        """

        return [(key, self[key]) for key in self.keys()]

    @util.deprecated(
        "1.4",
        "The :meth:`.LegacyRow.iterkeys` method is deprecated and will be "
        "removed in a future release. Use the :attr:`Row._mapping` "
        "attribute, i.e., 'row._mapping.keys()'.",
    )
    def iterkeys(self):
        """Return an iterator against the :meth:`.Row.keys` method.

        This method is analogous to the Python-2-only dictionary
        ``.iterkeys()`` method.

        """
        return iter(self._parent.keys)

    @util.deprecated(
        "1.4",
        "The :meth:`.LegacyRow.itervalues` method is deprecated and will be "
        "removed in a future release. Use the :attr:`Row._mapping` "
        "attribute, i.e., 'row._mapping.values()'.",
    )
    def itervalues(self):
        """Return an iterator against the :meth:`.Row.values` method.

        This method is analogous to the Python-2-only dictionary
        ``.itervalues()`` method.

        """
        return iter(self)

    @util.deprecated(
        "1.4",
        "The :meth:`.LegacyRow.values` method is deprecated and will be "
        "removed in a future release. Use the :attr:`Row._mapping` "
        "attribute, i.e., 'row._mapping.values()'.",
    )
    def values(self):
        """Return the values represented by this :class:`.Row` as a list.

        This method is analogous to the Python dictionary ``.values()`` method,
        except that it returns a list, not an iterator.

        """

        return self._values_impl()
||||
|
||||
# backwards-compatibility aliases for the pre-1.4 class names
BaseRowProxy = BaseRow
RowProxy = Row
|
||||
|
||||
|
||||
class ROMappingView(
    collections_abc.KeysView,
    collections_abc.ValuesView,
    collections_abc.ItemsView,
):
    """Read-only view over a precomputed list of elements belonging to a
    mapping.

    Serves as the common view type returned by ``RowMapping.items()`` and
    ``RowMapping.values()``; the elements are materialized up front and
    handed to the constructor.
    """

    __slots__ = ("_mapping", "_items")

    def __init__(self, mapping, items):
        self._mapping = mapping
        self._items = items

    def __iter__(self):
        return iter(self._items)

    def __len__(self):
        return len(self._items)

    def __contains__(self, item):
        return item in self._items

    def __repr__(self):
        # render as ClassName(<repr of the backing mapping>)
        return "%s(%r)" % (self.__class__.__name__, self._mapping)

    def __eq__(self, other):
        # order-sensitive comparison against any iterable
        return list(other) == list(self)

    def __ne__(self, other):
        return list(other) != list(self)
|
||||
|
||||
|
||||
class RowMapping(BaseRow, collections_abc.Mapping):
    """A ``Mapping`` that maps column names and objects to :class:`.Row`
    values.

    The :class:`.RowMapping` is available from a :class:`.Row` via the
    :attr:`.Row._mapping` attribute, as well as from the iterable interface
    provided by the :class:`.MappingResult` object returned by the
    :meth:`_engine.Result.mappings` method.

    :class:`.RowMapping` supplies Python mapping (i.e. dictionary) access to
    the  contents of the row.   This includes support for testing of
    containment of specific keys (string column names or objects), as well
    as iteration of keys, values, and items::

        for row in result:
            if 'a' in row._mapping:
                print("Column 'a': %s" % row._mapping['a'])

            print("Column b: %s" % row._mapping[table.c.b])


    .. versionadded:: 1.4 The :class:`.RowMapping` object replaces the
       mapping-like access previously provided by a database result row,
       which now seeks to behave mostly like a named tuple.

    """

    __slots__ = ()

    _default_key_style = KEY_OBJECTS_ONLY

    # the C extension supplies its own mapping-style __getitem__; only the
    # pure-Python BaseRow needs the explicit rebind
    if not _baserow_usecext:

        __getitem__ = BaseRow._get_by_key_impl_mapping

        def _values_impl(self):
            return list(self._data)

    def __iter__(self):
        # iterate string keys only; None keys (unnamed columns) are skipped
        return (k for k in self._parent.keys if k is not None)

    def __len__(self):
        return len(self._data)

    def __contains__(self, key):
        return self._parent._has_key(key)

    def __repr__(self):
        return repr(dict(self))

    def items(self):
        """Return a view of key/value tuples for the elements in the
        underlying :class:`.Row`.

        """
        return ROMappingView(self, [(key, self[key]) for key in self.keys()])

    def keys(self):
        """Return a view of 'keys' for string column names represented
        by the underlying :class:`.Row`.

        """

        return self._parent.keys

    def values(self):
        """Return a view of values for the values represented in the
        underlying :class:`.Row`.

        """
        return ROMappingView(self, self._values_impl())
|
||||
17
lib/sqlalchemy/engine/strategies.py
Normal file
17
lib/sqlalchemy/engine/strategies.py
Normal file
@@ -0,0 +1,17 @@
|
||||
# engine/strategies.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Deprecated mock engine strategy used by Alembic.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
from .mock import MockConnection # noqa
|
||||
|
||||
|
||||
class MockEngineStrategy(object):
    """Deprecated shim retained for Alembic (per the module docstring);
    exposes :class:`.MockConnection` under the legacy strategy-class
    attribute."""

    MockConnection = MockConnection
|
||||
806
lib/sqlalchemy/engine/url.py
Normal file
806
lib/sqlalchemy/engine/url.py
Normal file
@@ -0,0 +1,806 @@
|
||||
# engine/url.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
|
||||
information about a database connection specification.
|
||||
|
||||
The URL object is created automatically when
|
||||
:func:`~sqlalchemy.engine.create_engine` is called with a string
|
||||
argument; alternatively, the URL is a public-facing construct which can
|
||||
be used directly and is also accepted directly by ``create_engine()``.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from .interfaces import Dialect
|
||||
from .. import exc
|
||||
from .. import util
|
||||
from ..dialects import plugins
|
||||
from ..dialects import registry
|
||||
from ..util import collections_abc
|
||||
from ..util import compat
|
||||
|
||||
|
||||
class URL(
|
||||
util.namedtuple(
|
||||
"URL",
|
||||
[
|
||||
"drivername",
|
||||
"username",
|
||||
"password",
|
||||
"host",
|
||||
"port",
|
||||
"database",
|
||||
"query",
|
||||
],
|
||||
)
|
||||
):
|
||||
"""
|
||||
Represent the components of a URL used to connect to a database.
|
||||
|
||||
This object is suitable to be passed directly to a
|
||||
:func:`_sa.create_engine` call. The fields of the URL are parsed
|
||||
from a string by the :func:`.make_url` function. The string
|
||||
format of the URL is an RFC-1738-style string.
|
||||
|
||||
To create a new :class:`_engine.URL` object, use the
|
||||
:func:`_engine.url.make_url` function. To construct a :class:`_engine.URL`
|
||||
programmatically, use the :meth:`_engine.URL.create` constructor.
|
||||
|
||||
.. versionchanged:: 1.4
|
||||
|
||||
The :class:`_engine.URL` object is now an immutable object. To
|
||||
create a URL, use the :func:`_engine.make_url` or
|
||||
:meth:`_engine.URL.create` function / method. To modify
|
||||
a :class:`_engine.URL`, use methods like
|
||||
:meth:`_engine.URL.set` and
|
||||
:meth:`_engine.URL.update_query_dict` to return a new
|
||||
:class:`_engine.URL` object with modifications. See notes for this
|
||||
change at :ref:`change_5526`.
|
||||
|
||||
:class:`_engine.URL` contains the following attributes:
|
||||
|
||||
* :attr:`_engine.URL.drivername`: database backend and driver name, such as
|
||||
``postgresql+psycopg2``
|
||||
* :attr:`_engine.URL.username`: username string
|
||||
* :attr:`_engine.URL.password`: password string
|
||||
* :attr:`_engine.URL.host`: string hostname
|
||||
* :attr:`_engine.URL.port`: integer port number
|
||||
* :attr:`_engine.URL.database`: string database name
|
||||
* :attr:`_engine.URL.query`: an immutable mapping representing the query
|
||||
string. contains strings for keys and either strings or tuples of
|
||||
strings for values.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __new__(self, *arg, **kw):
    # Direct URL() construction is deprecated in 1.4; URL.create() is the
    # public constructor.  Internal call sites pass _new_ok=True to build
    # the namedtuple directly without triggering the warning.
    if kw.pop("_new_ok", False):
        return super(URL, self).__new__(self, *arg, **kw)
    else:
        util.warn_deprecated(
            "Calling URL() directly is deprecated and will be disabled "
            "in a future release. The public constructor for URL is "
            "now the URL.create() method.",
            "1.4",
        )
        # route the deprecated call through create() so the same
        # validation/coercion is applied
        return URL.create(*arg, **kw)
|
||||
|
||||
@classmethod
def create(
    cls,
    drivername,
    username=None,
    password=None,
    host=None,
    port=None,
    database=None,
    query=util.EMPTY_DICT,
):
    """Create a new :class:`_engine.URL` object.

    :param drivername: the name of the database backend, optionally
     including the driver, e.g. ``postgresql+psycopg2``.
    :param username: the user name, or None.
    :param password: database password; typically a string but may be
     any object stringifiable with ``str()``.
    :param host: the host name, or None.
    :param port: the port number (coerced via ``int()``), or None.
    :param database: the database name, or None.
    :param query: a mapping (or sequence of key/value pairs) of string
     keys to string or sequence-of-string values, stored as an
     immutable dictionary.
    :return: new :class:`_engine.URL` object.

    .. versionadded:: 1.4

    """

    # validate/coerce every field; _new_ok bypasses the __new__
    # deprecation warning for this internal construction path
    return cls(
        cls._assert_str(drivername, "drivername"),
        cls._assert_none_str(username, "username"),
        password,
        cls._assert_none_str(host, "host"),
        cls._assert_port(port),
        cls._assert_none_str(database, "database"),
        cls._str_dict(query),
        _new_ok=True,
    )
|
||||
|
||||
@classmethod
def _assert_port(cls, port):
    # Coerce port to an integer; None passes through unchanged.
    # NOTE(review): a non-numeric *string* port raises a bare ValueError
    # from int() rather than the friendlier message below -- confirm
    # this is intended.
    if port is None:
        return None
    try:
        return int(port)
    except TypeError:
        raise TypeError("Port argument must be an integer or None")

@classmethod
def _assert_str(cls, v, paramname):
    # Require a string value; paramname appears in the error message.
    if not isinstance(v, compat.string_types):
        raise TypeError("%s must be a string" % paramname)
    return v

@classmethod
def _assert_none_str(cls, v, paramname):
    # Like _assert_str, but None is allowed and passed through.
    if v is None:
        return v

    return cls._assert_str(v, paramname)

@classmethod
def _str_dict(cls, dict_):
    """Coerce a query mapping (or sequence of key/value pairs) into an
    immutabledict of string keys to string / tuple-of-string values.

    """
    if dict_ is None:
        return util.EMPTY_DICT

    def _assert_value(val):
        # strings pass through; non-string sequences become tuples of
        # validated strings; anything else is rejected
        if isinstance(val, compat.string_types):
            return val
        elif isinstance(val, collections_abc.Sequence):
            return tuple(_assert_value(elem) for elem in val)
        else:
            raise TypeError(
                "Query dictionary values must be strings or "
                "sequences of strings"
            )

    def _assert_str(v):
        if not isinstance(v, compat.string_types):
            raise TypeError("Query dictionary keys must be strings")
        return v

    # accept either a sequence of (key, value) pairs or a mapping
    if isinstance(dict_, collections_abc.Sequence):
        dict_items = dict_
    else:
        dict_items = dict_.items()

    return util.immutabledict(
        {
            _assert_str(key): _assert_value(
                value,
            )
            for key, value in dict_items
        }
    )
|
||||
|
||||
def set(
    self,
    drivername=None,
    username=None,
    password=None,
    host=None,
    port=None,
    database=None,
    query=None,
):
    """Return a new :class:`_engine.URL` object with modifications.

    Values are used if they are non-None.  To set a value to ``None``
    explicitly, use the :meth:`_engine.URL._replace` method adapted
    from ``namedtuple``.

    :param drivername: new drivername
    :param username: new username
    :param password: new password
    :param host: new hostname
    :param port: new port
    :param query: new query parameters; fully replaces the previous
     query dictionary.

    :return: new :class:`_engine.URL` object.

    .. versionadded:: 1.4

    .. seealso::

        :meth:`_engine.URL.update_query_dict`

    """

    # collect only the fields that were explicitly supplied
    kw = {}
    if drivername is not None:
        kw["drivername"] = drivername
    if username is not None:
        kw["username"] = username
    if password is not None:
        kw["password"] = password
    if host is not None:
        kw["host"] = host
    if port is not None:
        kw["port"] = port
    if database is not None:
        kw["database"] = database
    if query is not None:
        kw["query"] = query

    return self._replace(**kw)

def _replace(self, **kw):
    """Override ``namedtuple._replace()`` to provide argument checking."""

    # apply the same validation/coercion used by create()
    if "drivername" in kw:
        self._assert_str(kw["drivername"], "drivername")
    for name in "username", "host", "database":
        if name in kw:
            self._assert_none_str(kw[name], name)
    if "port" in kw:
        self._assert_port(kw["port"])
    if "query" in kw:
        kw["query"] = self._str_dict(kw["query"])

    return super(URL, self)._replace(**kw)
|
||||
|
||||
def update_query_string(self, query_string, append=False):
    """Return a new :class:`_engine.URL` object with the
    :attr:`_engine.URL.query` parameter dictionary updated by the given
    URL-escaped query string (without the leading question mark).

    :param append: if True, parameters in the existing query string will
     not be removed; new parameters will be in addition to those present.
     If left at its default of False, keys present in the given query
     parameters will replace those of the existing query string.

    .. versionadded:: 1.4

    .. seealso::

        :attr:`_engine.URL.query`

        :meth:`_engine.URL.update_query_dict`

    """  # noqa: E501
    return self.update_query_pairs(
        util.parse_qsl(query_string), append=append
    )

def update_query_pairs(self, key_value_pairs, append=False):
    """Return a new :class:`_engine.URL` object with the
    :attr:`_engine.URL.query` parameter dictionary updated by the given
    sequence of (key, value) string pairs.

    :param append: if True, parameters in the existing query string will
     not be removed; new parameters will be in addition to those present.
     If left at its default of False, keys present in the given query
     parameters will replace those of the existing query string.

    .. versionadded:: 1.4

    .. seealso::

        :attr:`_engine.URL.query`

        :meth:`_engine.URL.difference_update_query`

        :meth:`_engine.URL.set`

    """  # noqa: E501

    existing_query = self.query
    new_keys = {}

    # collapse repeated keys in the incoming pairs into lists of values
    for key, value in key_value_pairs:
        if key in new_keys:
            new_keys[key] = util.to_list(new_keys[key])
            new_keys[key].append(value)
        else:
            new_keys[key] = value

    if append:
        new_query = {}

        # keys present both here and in the existing query get their
        # value lists concatenated (existing values first)
        for k in new_keys:
            if k in existing_query:
                new_query[k] = util.to_list(
                    existing_query[k]
                ) + util.to_list(new_keys[k])
            else:
                new_query[k] = new_keys[k]

        # carry over existing keys that were not updated
        new_query.update(
            {
                k: existing_query[k]
                for k in set(existing_query).difference(new_keys)
            }
        )
    else:
        # replace semantics: incoming keys win over existing ones
        new_query = self.query.union(new_keys)
    return self.set(query=new_query)
|
||||
|
||||
def update_query_dict(self, query_parameters, append=False):
    """Return a new :class:`_engine.URL` object with the
    :attr:`_engine.URL.query` parameter dictionary updated by the given
    dictionary of string keys to string (or sequence-of-string) values.

    :param append: if True, parameters in the existing query string will
     not be removed; new parameters will be in addition to those present.
     If left at its default of False, keys present in the given query
     parameters will replace those of the existing query string.

    .. versionadded:: 1.4

    .. seealso::

        :attr:`_engine.URL.query`

        :meth:`_engine.URL.update_query_string`

        :meth:`_engine.URL.update_query_pairs`

        :meth:`_engine.URL.difference_update_query`

        :meth:`_engine.URL.set`

    """  # noqa: E501
    return self.update_query_pairs(query_parameters.items(), append=append)

def difference_update_query(self, names):
    """
    Remove the given names from the :attr:`_engine.URL.query` dictionary,
    returning the new :class:`_engine.URL`.

    E.g.::

        url = url.difference_update_query(['foo', 'bar'])

    .. versionadded:: 1.4

    .. seealso::

        :attr:`_engine.URL.query`

        :meth:`_engine.URL.update_query_dict`

        :meth:`_engine.URL.set`

    """

    # nothing to remove; URL is immutable so self may be returned as-is
    if not set(names).intersection(self.query):
        return self

    # internal construction path; _new_ok avoids the deprecation warning
    return URL(
        self.drivername,
        self.username,
        self.password,
        self.host,
        self.port,
        self.database,
        util.immutabledict(
            {
                key: self.query[key]
                for key in set(self.query).difference(names)
            }
        ),
        _new_ok=True,
    )
|
||||
|
||||
@util.memoized_property
def normalized_query(self):
    """Return the :attr:`_engine.URL.query` dictionary with all values
    normalized into tuples.

    As :attr:`_engine.URL.query` may contain either string values or
    tuples of strings (for parameters given multiple times), code that
    handles parameters generically can use this attribute to see every
    value as a sequence, in the spirit of Python's
    ``urllib.parse.parse_qs``.

    """  # noqa: E501

    # memoized: the underlying query dict is immutable
    return util.immutabledict(
        {
            k: (v,) if not isinstance(v, tuple) else v
            for k, v in self.query.items()
        }
    )
|
||||
|
||||
@util.deprecated(
    "1.4",
    # fixed: the :meth: role was missing its closing backtick, which
    # renders the deprecation message / docs markup incorrectly
    "The :meth:`_engine.URL.__to_string__` method is deprecated and will "
    "be removed in a future release. Please use the "
    ":meth:`_engine.URL.render_as_string` method.",
)
def __to_string__(self, hide_password=True):
    """Render this :class:`_engine.URL` object as a string.

    Deprecated alias of :meth:`_engine.URL.render_as_string`.

    :param hide_password: Defaults to True.  The password is not shown
     in the string unless this is set to False.

    """
    return self.render_as_string(hide_password=hide_password)
|
||||
|
||||
def render_as_string(self, hide_password=True):
    """Render this :class:`_engine.URL` object as a string.

    This method is used when the ``__str__()`` or ``__repr__()``
    methods are used.  The method directly includes additional options.

    :param hide_password: Defaults to True.  The password is not shown
     in the string unless this is set to False.

    """
    s = self.drivername + "://"
    if self.username is not None:
        s += _rfc_1738_quote(self.username)
        if self.password is not None:
            # password objects are stringified here, once per render
            s += ":" + (
                "***"
                if hide_password
                else _rfc_1738_quote(str(self.password))
            )
        s += "@"
    if self.host is not None:
        if ":" in self.host:
            # IPv6 literal hosts are bracketed per RFC 3986
            s += "[%s]" % self.host
        else:
            s += self.host
    if self.port is not None:
        s += ":" + str(self.port)
    if self.database is not None:
        s += "/" + self.database
    if self.query:
        # sorted keys give a deterministic rendering; multi-valued
        # parameters are emitted as repeated key=value pairs
        keys = list(self.query)
        keys.sort()
        s += "?" + "&".join(
            "%s=%s" % (util.quote_plus(k), util.quote_plus(element))
            for k in keys
            for element in util.to_list(self.query[k])
        )
    return s
|
||||
|
||||
def __str__(self):
    # str() shows the real password
    return self.render_as_string(hide_password=False)

def __repr__(self):
    # repr() masks the password as "***"
    return self.render_as_string()

def __copy__(self):
    return self.__class__.create(
        self.drivername,
        self.username,
        self.password,
        self.host,
        self.port,
        self.database,
        # note this is an immutabledict of str-> str / tuple of str,
        # also fully immutable.  does not require deepcopy
        self.query,
    )

def __deepcopy__(self, memo):
    # every field is immutable, so a shallow copy is sufficient
    return self.__copy__()

def __hash__(self):
    # hash of the fully rendered string, password included
    return hash(str(self))

def __eq__(self, other):
    # field-by-field comparison; never equal to a non-URL object
    return (
        isinstance(other, URL)
        and self.drivername == other.drivername
        and self.username == other.username
        and self.password == other.password
        and self.host == other.host
        and self.database == other.database
        and self.query == other.query
        and self.port == other.port
    )

def __ne__(self, other):
    return not self == other
|
||||
|
||||
def get_backend_name(self):
    """Return the backend name.

    This is the portion of :attr:`_engine.URL.drivername` to the left
    of the plus sign; when no plus sign is present, the entire
    drivername is the backend name.

    """
    backend, _, _ = self.drivername.partition("+")
    return backend
|
||||
|
||||
def get_driver_name(self):
    """Return the driver name.

    This is the name that corresponds to the DBAPI driver in
    use, and is the portion of the :attr:`_engine.URL.drivername`
    that is to the right of the plus sign.

    If the :attr:`_engine.URL.drivername` does not include a plus sign,
    then the default :class:`_engine.Dialect` for this
    :class:`_engine.URL` is imported in order to get the driver name.

    """

    if "+" not in self.drivername:
        # no explicit driver in the URL; ask the default dialect
        return self.get_dialect().driver
    else:
        return self.drivername.split("+")[1]
|
||||
|
||||
def _instantiate_plugins(self, kwargs):
    """Load and instantiate CreateEnginePlugin objects named by this URL
    and/or the create_engine() keyword arguments.

    Returns a (url, loaded_plugins, kwargs) triple where plugin-related
    entries have been stripped from both the URL query and kwargs.
    """
    # plugin names come from the "plugin" URL query parameter and/or
    # the create_engine() "plugins" keyword
    plugin_names = util.to_list(self.query.get("plugin", ()))
    plugin_names += kwargs.get("plugins", [])

    # work on a copy; plugin constructors may consume kwargs entries
    kwargs = dict(kwargs)

    loaded_plugins = [
        plugins.load(plugin_name)(self, kwargs)
        for plugin_name in plugin_names
    ]

    # strip plugin-related parameters from the URL passed onward
    u = self.difference_update_query(["plugin", "plugins"])

    # a plugin may replace the URL entirely by returning a new one
    for plugin in loaded_plugins:
        new_u = plugin.update_url(u)
        if new_u is not None:
            u = new_u

    kwargs.pop("plugins", None)

    return u, loaded_plugins, kwargs

def _get_entrypoint(self):
    """Return the "entry point" dialect class.

    This is normally the dialect itself except in the case when the
    returned class implements the get_dialect_cls() method.

    """
    if "+" not in self.drivername:
        name = self.drivername
    else:
        # registry entry points use dotted names, e.g. "postgresql.psycopg2"
        name = self.drivername.replace("+", ".")
    cls = registry.load(name)
    # check for legacy dialects that
    # would return a module with 'dialect' as the
    # actual class
    if (
        hasattr(cls, "dialect")
        and isinstance(cls.dialect, type)
        and issubclass(cls.dialect, Dialect)
    ):
        return cls.dialect
    else:
        return cls
|
||||
|
||||
def get_dialect(self):
    """Return the SQLAlchemy :class:`_engine.Dialect` class corresponding
    to this URL's driver name.

    """
    entrypoint = self._get_entrypoint()
    # the entrypoint may redirect to another dialect class
    dialect_cls = entrypoint.get_dialect_cls(self)
    return dialect_cls
|
||||
|
||||
def translate_connect_args(self, names=None, **kw):
    r"""Translate url attributes into a dictionary of connection arguments.

    Returns attributes of this url (``host``, ``database``, ``username``,
    ``password``, ``port``) as a plain dictionary.  The attribute names
    are used as the keys by default.  Unset or false attributes are
    omitted from the final dictionary.

    :param \**kw: Optional, alternate key names for url attributes.

    :param names: Deprecated.  Same purpose as the keyword-based alternate
     names, but correlates the name to the original positionally.
    """

    if names is not None:
        util.warn_deprecated(
            "The `URL.translate_connect_args.name`s parameter is "
            "deprecated. Please pass the "
            "alternate names as kw arguments.",
            "1.4",
        )
        # fix: work on a copy so the pop(0) calls below do not consume
        # the caller's list (the previous implementation mutated it)
        names = list(names)

    translated = {}
    attribute_names = ["host", "database", "username", "password", "port"]
    for sname in attribute_names:
        # resolution order: positional deprecated names, then keyword
        # alternates, then the attribute's own name
        if names:
            name = names.pop(0)
        elif sname in kw:
            name = kw[sname]
        else:
            name = sname
        # falsy attributes (None, "", 0) are omitted from the result
        if name is not None and getattr(self, sname, False):
            if sname == "password":
                # password objects are stringified exactly once here
                translated[name] = str(getattr(self, sname))
            else:
                translated[name] = getattr(self, sname)

    return translated
|
||||
|
||||
|
||||
def make_url(name_or_url):
    """Given a string or unicode instance, produce a new URL instance.

    The given string is parsed according to the RFC 1738 spec.  If an
    existing URL object is passed, just returns the object.
    """

    if isinstance(name_or_url, util.string_types):
        return _parse_rfc1738_args(name_or_url)
    else:
        # assumed to already be a URL instance; passed through unchanged
        return name_or_url
|
||||
|
||||
|
||||
def _parse_rfc1738_args(name):
    """Parse an RFC 1738 style URL string into a :class:`.URL`.

    Raises :class:`.exc.ArgumentError` when the string cannot be parsed.
    """
    pattern = re.compile(
        r"""
        (?P<name>[\w\+]+)://
        (?:
            (?P<username>[^:/]*)
            (?::(?P<password>[^@]*))?
        @)?
        (?:
            (?:
                \[(?P<ipv6host>[^/\?]+)\] |
                (?P<ipv4host>[^/:\?]+)
            )?
            (?::(?P<port>[^/\?]*))?
        )?
        (?:/(?P<database>[^\?]*))?
        (?:\?(?P<query>.*))?
        """,
        re.X,
    )

    m = pattern.match(name)
    if m is not None:
        components = m.groupdict()
        if components["query"] is not None:
            query = {}

            # repeated query keys accumulate into lists of values
            for key, value in util.parse_qsl(components["query"]):
                if util.py2k:
                    # Python 2: keys must be bytestrings to be usable
                    # as keyword argument names downstream
                    key = key.encode("ascii")
                if key in query:
                    query[key] = util.to_list(query[key])
                    query[key].append(value)
                else:
                    query[key] = value
        else:
            query = None
        components["query"] = query

        # username/password are percent-decoded per RFC 1738
        if components["username"] is not None:
            components["username"] = _rfc_1738_unquote(components["username"])

        if components["password"] is not None:
            components["password"] = _rfc_1738_unquote(components["password"])

        # at most one of the two host alternatives matched
        ipv4host = components.pop("ipv4host")
        ipv6host = components.pop("ipv6host")
        components["host"] = ipv4host or ipv6host
        name = components.pop("name")

        # empty-string port (e.g. "host:/db") is treated as no port
        if components["port"]:
            components["port"] = int(components["port"])

        return URL.create(name, **components)

    else:
        raise exc.ArgumentError(
            "Could not parse rfc1738 URL from string '%s'" % name
        )
|
||||
|
||||
|
||||
def _rfc_1738_quote(text):
|
||||
return re.sub(r"[:@/]", lambda m: "%%%X" % ord(m.group(0)), text)
|
||||
|
||||
|
||||
def _rfc_1738_unquote(text):
    # inverse of _rfc_1738_quote: plain URL percent-decoding
    return util.unquote(text)


def _parse_keyvalue_args(name):
    """Parse a ``backend://key=value&key=value`` style string; return
    None when the string does not match that form.
    """
    m = re.match(r"(\w+)://(.*)", name)
    if m is not None:
        (name, args) = m.group(1, 2)
        opts = dict(util.parse_qsl(args))
        # NOTE(review): iterating a dict yields only its keys, so *opts
        # passes the option *names* positionally to URL(); this also goes
        # through the deprecated direct URL() constructor -- confirm this
        # legacy path is intentional.
        return URL(name, *opts)
    else:
        return None
|
||||
253
lib/sqlalchemy/engine/util.py
Normal file
253
lib/sqlalchemy/engine/util.py
Normal file
@@ -0,0 +1,253 @@
|
||||
# engine/util.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from .. import exc
|
||||
from .. import util
|
||||
from ..util import collections_abc
|
||||
from ..util import immutabledict
|
||||
|
||||
|
||||
def connection_memoize(key):
    """Decorator, memoize a function in a connection.info stash.

    Only applicable to functions which take no arguments other than a
    connection.  The memo will be stored in ``connection.info[key]``.
    """

    @util.decorator
    def decorated(fn, self, connection):
        # ensure we have a live Connection (engines connect lazily)
        connection = connection.connect()
        try:
            return connection.info[key]
        except KeyError:
            # first call for this connection: compute and stash the value
            connection.info[key] = val = fn(self, connection)
            return val

    return decorated
|
||||
|
||||
|
||||
_no_tuple = ()
|
||||
_no_kw = util.immutabledict()
|
||||
|
||||
|
||||
def _distill_params(connection, multiparams, params):
    r"""Given arguments from the calling form \*multiparams, \**params,
    return a list of bind parameter structures, usually a list of
    dictionaries.

    In the case of 'raw' execution which accepts positional parameters,
    it may be a list of tuples or lists.

    Legacy calling forms emit a deprecation warning via the connection.
    """

    if not multiparams:
        if params:
            # kwargs-only parameters are a legacy calling form
            connection._warn_for_legacy_exec_format()
            return [params]
        else:
            return []
    elif len(multiparams) == 1:
        zero = multiparams[0]
        if isinstance(zero, (list, tuple)):
            # a list whose first element is itself iterable (but not a
            # string) is treated as an "executemany" parameter set
            if (
                not zero
                or hasattr(zero[0], "__iter__")
                and not hasattr(zero[0], "strip")
            ):
                # execute(stmt, [{}, {}, {}, ...])
                # execute(stmt, [(), (), (), ...])
                return zero
            else:
                # this is used by exec_driver_sql only, so a deprecation
                # warning would already be coming from passing a plain
                # textual statement with positional parameters to
                # execute().
                # execute(stmt, ("value", "value"))
                return [zero]
        elif hasattr(zero, "keys"):
            # execute(stmt, {"key":"value"})
            return [zero]
        else:
            connection._warn_for_legacy_exec_format()
            # execute(stmt, "value")
            return [[zero]]
    else:
        # multiple positional parameter structures: legacy form
        connection._warn_for_legacy_exec_format()
        if hasattr(multiparams[0], "__iter__") and not hasattr(
            multiparams[0], "strip"
        ):
            return multiparams
        else:
            return [multiparams]
|
||||
|
||||
|
||||
def _distill_cursor_params(connection, multiparams, params):
    """_distill_params without any warnings.  more appropriate for
    "cursor" params that can include tuple arguments, lists of tuples,
    etc.

    """

    if not multiparams:
        if params:
            return [params]
        else:
            return []
    elif len(multiparams) == 1:
        zero = multiparams[0]
        if isinstance(zero, (list, tuple)):
            # a list whose first element is itself iterable (but not a
            # string) is treated as an "executemany" parameter set
            if (
                not zero
                or hasattr(zero[0], "__iter__")
                and not hasattr(zero[0], "strip")
            ):
                # execute(stmt, [{}, {}, {}, ...])
                # execute(stmt, [(), (), (), ...])
                return zero
            else:
                # this is used by exec_driver_sql only, so a deprecation
                # warning would already be coming from passing a plain
                # textual statement with positional parameters to
                # execute().
                # execute(stmt, ("value", "value"))

                return [zero]
        elif hasattr(zero, "keys"):
            # execute(stmt, {"key":"value"})
            return [zero]
        else:
            # execute(stmt, "value")
            return [[zero]]
    else:
        if hasattr(multiparams[0], "__iter__") and not hasattr(
            multiparams[0], "strip"
        ):
            return multiparams
        else:
            return [multiparams]
|
||||
|
||||
|
||||
def _distill_params_20(params):
    """Normalize 2.0-style execute() parameters into a
    (positional-tuple, keyword-dict) pair, validating types.

    Raises :class:`.exc.ArgumentError` for unsupported parameter shapes.
    """
    if params is None:
        return _no_tuple, _no_kw
    elif isinstance(params, list):
        # collections_abc.MutableSequence): # avoid abc.__instancecheck__
        if params and not isinstance(
            params[0], (collections_abc.Mapping, tuple)
        ):
            raise exc.ArgumentError(
                "List argument must consist only of tuples or dictionaries"
            )

        return (params,), _no_kw
    elif isinstance(
        params,
        (tuple, dict, immutabledict),
        # only do abc.__instancecheck__ for Mapping after we've checked
        # for plain dictionaries and would otherwise raise
    ) or isinstance(params, collections_abc.Mapping):
        return (params,), _no_kw
    else:
        raise exc.ArgumentError("mapping or sequence expected for parameters")
|
||||
|
||||
|
||||
class TransactionalContext(object):
    """Apply Python context manager behavior to transaction objects.

    Performs validation to ensure the subject of the transaction is not
    used if the transaction were ended prematurely.

    """

    # the object (Connection / Session) this context manager is currently
    # installed on; None when not active
    _trans_subject = None

    def _transaction_is_active(self):
        raise NotImplementedError()

    def _transaction_is_closed(self):
        raise NotImplementedError()

    def _rollback_can_be_called(self):
        """indicates the object is in a state that is known to be acceptable
        for rollback() to be called.

        This does not necessarily mean rollback() will succeed or not raise
        an error, just that there is currently no state detected that indicates
        rollback() would fail or emit warnings.

        It also does not mean that there's a transaction in progress, as
        it is usually safe to call rollback() even if no transaction is
        present.

        .. versionadded:: 1.4.28

        """
        raise NotImplementedError()

    def _get_subject(self):
        raise NotImplementedError()

    @classmethod
    def _trans_ctx_check(cls, subject):
        # guard: reject operations on a subject whose context-managed
        # transaction has already ended
        trans_context = subject._trans_context_manager
        if trans_context:
            if not trans_context._transaction_is_active():
                raise exc.InvalidRequestError(
                    "Can't operate on closed transaction inside context "
                    "manager.  Please complete the context manager "
                    "before emitting further commands."
                )

    def __enter__(self):
        subject = self._get_subject()

        # none for outer transaction, may be non-None for nested
        # savepoint, legacy nesting cases
        trans_context = subject._trans_context_manager
        self._outer_trans_ctx = trans_context

        self._trans_subject = subject
        subject._trans_context_manager = self
        return self

    def __exit__(self, type_, value, traceback):
        subject = self._trans_subject

        # simplistically we could assume that
        # "subject._trans_context_manager is self".  However, any calling
        # code that is manipulating __exit__ directly would break this
        # assumption.  alembic context manager
        # is an example of partial use that just calls __exit__ and
        # not __enter__ at the moment.  it's safe to assume this is being done
        # in the wild also
        out_of_band_exit = (
            subject is None or subject._trans_context_manager is not self
        )

        if type_ is None and self._transaction_is_active():
            # clean exit with an active transaction: commit; on commit
            # failure roll back (when safe) and re-raise the original error
            try:
                self.commit()
            except:
                # bare except is deliberate: safe_reraise() re-raises the
                # in-flight exception after the rollback attempt
                with util.safe_reraise():
                    if self._rollback_can_be_called():
                        self.rollback()
            finally:
                if not out_of_band_exit:
                    # restore any outer (nested) context manager
                    subject._trans_context_manager = self._outer_trans_ctx
                self._trans_subject = self._outer_trans_ctx = None
        else:
            # exception raised, or transaction already ended: close an
            # inactive-but-open transaction, otherwise roll back when safe
            try:
                if not self._transaction_is_active():
                    if not self._transaction_is_closed():
                        self.close()
                else:
                    if self._rollback_can_be_called():
                        self.rollback()
            finally:
                if not out_of_band_exit:
                    subject._trans_context_manager = self._outer_trans_ctx
                self._trans_subject = self._outer_trans_ctx = None
|
||||
Reference in New Issue
Block a user