first add files

2023-10-08 20:59:00 +08:00
parent b494be364b
commit 1dac226337
991 changed files with 368151 additions and 40 deletions

56
lib/sqlalchemy/pool/__init__.py Normal file

@@ -0,0 +1,56 @@
# sqlalchemy/pool/__init__.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Connection pooling for DB-API connections.
Provides a number of connection pool implementations for a variety of
usage scenarios and thread behavior requirements imposed by the
application, DB-API or database itself.
Also provides a DB-API 2.0 connection proxying mechanism allowing
regular DB-API connect() methods to be transparently managed by a
SQLAlchemy connection pool.
"""
from . import events
from .base import _ConnectionFairy
from .base import _ConnectionRecord
from .base import _finalize_fairy
from .base import Pool
from .base import reset_commit
from .base import reset_none
from .base import reset_rollback
from .dbapi_proxy import clear_managers
from .dbapi_proxy import manage
from .impl import AssertionPool
from .impl import AsyncAdaptedQueuePool
from .impl import FallbackAsyncAdaptedQueuePool
from .impl import NullPool
from .impl import QueuePool
from .impl import SingletonThreadPool
from .impl import StaticPool
__all__ = [
"Pool",
"reset_commit",
"reset_none",
"reset_rollback",
"clear_managers",
"manage",
"AssertionPool",
"NullPool",
"QueuePool",
"AsyncAdaptedQueuePool",
"FallbackAsyncAdaptedQueuePool",
"SingletonThreadPool",
"StaticPool",
]
# as these are likely to be used in various test suites, debugging
# setups, keep them in the sqlalchemy.pool namespace
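As a quick orientation, a minimal usage sketch of the public names above (illustrative, not part of this commit; the stdlib sqlite3 module stands in for the DB-API, and the creator callable and pool parameters are assumptions for the example):

import sqlite3

from sqlalchemy.pool import QueuePool


def creator():
    # called by the pool whenever it needs a brand-new DB-API connection
    return sqlite3.connect(":memory:")


pool = QueuePool(creator, pool_size=5, max_overflow=10, timeout=30.0)

conn = pool.connect()  # checks out a _ConnectionFairy proxying the DBAPI connection
conn.close()           # returns the connection to the pool rather than closing it
print(pool.status())   # e.g. "Pool size: 5  Connections in pool: 1 ..."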

1121
lib/sqlalchemy/pool/base.py Normal file

File diff suppressed because it is too large

147
lib/sqlalchemy/pool/dbapi_proxy.py Normal file

@@ -0,0 +1,147 @@
# sqlalchemy/pool/dbapi_proxy.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""DBAPI proxy utility.
Provides transparent connection pooling on top of a Python DBAPI.
This is legacy SQLAlchemy functionality that is not typically used
today.
"""
from .impl import QueuePool
from .. import util
from ..util import threading
proxies = {}
@util.deprecated(
"1.3",
"The :func:`.pool.manage` function is deprecated, and will be "
"removed in a future release.",
)
def manage(module, **params):
r"""Return a proxy for a DB-API module that automatically
pools connections.
Given a DB-API 2.0 module and pool management parameters, returns
a proxy for the module that will automatically pool connections,
creating new connection pools for each distinct set of connection
arguments sent to the decorated module's connect() function.
:param module: a DB-API 2.0 database module
:param poolclass: the class used by the pool module to provide
pooling. Defaults to :class:`.QueuePool`.
:param \**params: will be passed through to *poolclass*
"""
try:
return proxies[module]
except KeyError:
return proxies.setdefault(module, _DBProxy(module, **params))
def clear_managers():
"""Remove all current DB-API 2.0 managers.
All pools and connections are disposed.
"""
for manager in proxies.values():
manager.close()
proxies.clear()
class _DBProxy(object):
"""Layers connection pooling behavior on top of a standard DB-API module.
Proxies a DB-API 2.0 connect() call to a connection pool keyed to the
specific connect parameters. Other functions and attributes are delegated
to the underlying DB-API module.
"""
def __init__(self, module, poolclass=QueuePool, **kw):
"""Initializes a new proxy.
module
a DB-API 2.0 module
poolclass
a Pool class, defaulting to QueuePool
Other parameters are sent to the Pool object's constructor.
"""
self.module = module
self.kw = kw
self.poolclass = poolclass
self.pools = {}
self._create_pool_mutex = threading.Lock()
def close(self):
for key in list(self.pools):
del self.pools[key]
def __del__(self):
self.close()
def __getattr__(self, key):
return getattr(self.module, key)
def get_pool(self, *args, **kw):
key = self._serialize(*args, **kw)
try:
return self.pools[key]
except KeyError:
with self._create_pool_mutex:
if key not in self.pools:
kw.pop("sa_pool_key", None)
pool = self.poolclass(
lambda: self.module.connect(*args, **kw), **self.kw
)
self.pools[key] = pool
return pool
else:
return self.pools[key]
def connect(self, *args, **kw):
"""Activate a connection to the database.
Connect to the database using this DBProxy's module and the given
connect arguments. If the arguments match an existing pool, the
connection will be returned from the pool's current thread-local
connection instance, or if there is no thread-local connection
instance it will be checked out from the set of pooled connections.
If the pool has no available connections and allows new connections
to be created, a new database connection will be made.
"""
return self.get_pool(*args, **kw).connect()
def dispose(self, *args, **kw):
"""Dispose the pool referenced by the given connect arguments."""
key = self._serialize(*args, **kw)
try:
del self.pools[key]
except KeyError:
pass
def _serialize(self, *args, **kw):
if "sa_pool_key" in kw:
return kw["sa_pool_key"]
return tuple(list(args) + [(k, kw[k]) for k in sorted(kw)])
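A sketch of the legacy proxy in use (illustrative, not part of this commit; the stdlib sqlite3 module is assumed as the DB-API). As the decorator above notes, manage() emits a deprecation warning as of 1.3:

import sqlite3

from sqlalchemy.pool import clear_managers, manage

proxy = manage(sqlite3, pool_size=5)  # _DBProxy wrapping the sqlite3 module
conn = proxy.connect(":memory:")      # pooled; pools are keyed on the connect args
conn.close()                          # checked back in to the underlying QueuePool

clear_managers()                      # dispose every pool created via manage()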

284
lib/sqlalchemy/pool/events.py Normal file

@@ -0,0 +1,284 @@
# sqlalchemy/pool/events.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from .base import Pool
from .. import event
from ..engine.base import Engine
class PoolEvents(event.Events):
"""Available events for :class:`_pool.Pool`.
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
e.g.::
from sqlalchemy import event
def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
"handle an on checkout event"
event.listen(Pool, 'checkout', my_on_checkout)
In addition to accepting the :class:`_pool.Pool` class and
:class:`_pool.Pool` instances, :class:`_events.PoolEvents` also accepts
:class:`_engine.Engine` objects and the :class:`_engine.Engine` class as
targets, which will be resolved to the ``.pool`` attribute of the
given engine or the :class:`_pool.Pool` class::
engine = create_engine("postgresql://scott:tiger@localhost/test")
# will associate with engine.pool
event.listen(engine, 'checkout', my_on_checkout)
"""
_target_class_doc = "SomeEngineOrPool"
_dispatch_target = Pool
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
if issubclass(target, Engine):
return Pool
elif issubclass(target, Pool):
return target
elif isinstance(target, Engine):
return target.pool
elif isinstance(target, Pool):
return target
elif hasattr(target, "dispatch") and hasattr(
target.dispatch._events, "_no_async_engine_events"
):
target.dispatch._events._no_async_engine_events()
else:
return None
@classmethod
def _listen(cls, event_key, **kw):
target = event_key.dispatch_target
kw.setdefault("asyncio", target._is_asyncio)
event_key.base_listen(**kw)
def connect(self, dbapi_connection, connection_record):
"""Called at the moment a particular DBAPI connection is first
created for a given :class:`_pool.Pool`.
This event allows one to capture the point directly after which
the DBAPI module-level ``.connect()`` method has been used in order
to produce a new DBAPI connection.
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
def first_connect(self, dbapi_connection, connection_record):
"""Called exactly once for the first time a DBAPI connection is
checked out from a particular :class:`_pool.Pool`.
The rationale for :meth:`_events.PoolEvents.first_connect`
is to determine
information about a particular series of database connections based
on the settings used for all connections. Since a particular
:class:`_pool.Pool`
refers to a single "creator" function (which in terms
of a :class:`_engine.Engine`
refers to the URL and connection options used),
it is typically valid to make observations about a single connection
that can be safely assumed to be valid about all subsequent
connections, such as the database version, the server and client
encoding settings, collation settings, and many others.
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
def checkout(self, dbapi_connection, connection_record, connection_proxy):
"""Called when a connection is retrieved from the Pool.
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
:param connection_proxy: the :class:`._ConnectionFairy` object which
will proxy the public interface of the DBAPI connection for the
lifespan of the checkout.
If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
connection will be disposed and a fresh connection retrieved.
Processing of all checkout listeners will abort and restart
using the new connection.
.. seealso:: :meth:`_events.ConnectionEvents.engine_connect`
- a similar event
which occurs upon creation of a new :class:`_engine.Connection`.
"""
def checkin(self, dbapi_connection, connection_record):
"""Called when a connection returns to the pool.
Note that the connection may be closed, and may be None if the
connection has been invalidated. ``checkin`` will not be called
for detached connections. (They do not return to the pool.)
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
def reset(self, dbapi_connection, connection_record):
"""Called before the "reset" action occurs for a pooled connection.
This event represents
when the ``rollback()`` method is called on the DBAPI connection
before it is returned to the pool. The behavior of "reset" can
be controlled, including disabled, using the ``reset_on_return``
pool argument.
The :meth:`_events.PoolEvents.reset` event is usually followed by the
:meth:`_events.PoolEvents.checkin` event is called, except in those
cases where the connection is discarded immediately after reset.
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
.. seealso::
:meth:`_events.ConnectionEvents.rollback`
:meth:`_events.ConnectionEvents.commit`
"""
def invalidate(self, dbapi_connection, connection_record, exception):
"""Called when a DBAPI connection is to be "invalidated".
This event is called any time the :meth:`._ConnectionRecord.invalidate`
method is invoked, either from API usage or via "auto-invalidation",
without the ``soft`` flag.
The event occurs before a final attempt to call ``.close()`` on the
connection occurs.
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
:param exception: the exception object corresponding to the reason
for this invalidation, if any. May be ``None``.
.. versionadded:: 0.9.2 Added support for connection invalidation
listening.
.. seealso::
:ref:`pool_connection_invalidation`
"""
def soft_invalidate(self, dbapi_connection, connection_record, exception):
"""Called when a DBAPI connection is to be "soft invalidated".
This event is called any time the :meth:`._ConnectionRecord.invalidate`
method is invoked with the ``soft`` flag.
Soft invalidation refers to when the connection record that tracks
this connection will force a reconnect after the current connection
is checked in. It does not actively close the dbapi_connection
at the point at which it is called.
.. versionadded:: 1.0.3
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
:param exception: the exception object corresponding to the reason
for this invalidation, if any. May be ``None``.
"""
def close(self, dbapi_connection, connection_record):
"""Called when a DBAPI connection is closed.
The event is emitted before the close occurs.
The close of a connection can fail; typically this is because
the connection is already closed. If the close operation fails,
the connection is discarded.
The :meth:`.close` event corresponds to a connection that's still
associated with the pool. To intercept close events for detached
connections use :meth:`.close_detached`.
.. versionadded:: 1.1
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
def detach(self, dbapi_connection, connection_record):
"""Called when a DBAPI connection is "detached" from a pool.
This event is emitted after the detach occurs. The connection
is no longer associated with the given connection record.
.. versionadded:: 1.1
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
:param connection_record: the :class:`._ConnectionRecord` managing the
DBAPI connection.
"""
def close_detached(self, dbapi_connection):
"""Called when a detached DBAPI connection is closed.
The event is emitted before the close occurs.
The close of a connection can fail; typically this is because
the connection is already closed. If the close operation fails,
the connection is discarded.
.. versionadded:: 1.1
:param dbapi_connection: a DBAPI connection.
The :attr:`._ConnectionRecord.dbapi_connection` attribute.
"""

514
lib/sqlalchemy/pool/impl.py Normal file

@@ -0,0 +1,514 @@
# sqlalchemy/pool/impl.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Pool implementation classes.
"""
import traceback
import weakref
from .base import _AsyncConnDialect
from .base import _ConnectionFairy
from .base import _ConnectionRecord
from .base import Pool
from .. import exc
from .. import util
from ..util import chop_traceback
from ..util import queue as sqla_queue
from ..util import threading
class QueuePool(Pool):
"""A :class:`_pool.Pool`
that imposes a limit on the number of open connections.
:class:`.QueuePool` is the default pooling implementation used for
all :class:`_engine.Engine` objects, unless the SQLite dialect is in use.
"""
_is_asyncio = False
_queue_class = sqla_queue.Queue
def __init__(
self,
creator,
pool_size=5,
max_overflow=10,
timeout=30.0,
use_lifo=False,
**kw
):
r"""
Construct a QueuePool.
:param creator: a callable function that returns a DB-API
connection object, same as that of :paramref:`_pool.Pool.creator`.
:param pool_size: The size of the pool to be maintained,
defaults to 5. This is the largest number of connections that
will be kept persistently in the pool. Note that the pool
begins with no connections; once this number of connections
is requested, that number of connections will remain.
``pool_size`` can be set to 0 to indicate no size limit; to
disable pooling, use a :class:`~sqlalchemy.pool.NullPool`
instead.
:param max_overflow: The maximum overflow size of the
pool. When the number of checked-out connections reaches the
size set in pool_size, additional connections will be
returned up to this limit. When those additional connections
are returned to the pool, they are disconnected and
discarded. It follows then that the total number of
simultaneous connections the pool will allow is pool_size +
`max_overflow`, and the total number of "sleeping"
connections the pool will allow is pool_size. `max_overflow`
can be set to -1 to indicate no overflow limit; no limit
will be placed on the total number of concurrent
connections. Defaults to 10.
:param timeout: The number of seconds to wait before giving up
on returning a connection. Defaults to 30.0. This can be a float
but is subject to the limitations of Python time functions which
may not be reliable in the tens of milliseconds.
:param use_lifo: use LIFO (last-in-first-out) when retrieving
connections instead of FIFO (first-in-first-out). Using LIFO, a
server-side timeout scheme can reduce the number of connections used
during non-peak periods of use. When planning for server-side
timeouts, ensure that a recycle or pre-ping strategy is in use to
gracefully handle stale connections.
.. versionadded:: 1.3
.. seealso::
:ref:`pool_use_lifo`
:ref:`pool_disconnects`
:param \**kw: Other keyword arguments including
:paramref:`_pool.Pool.recycle`, :paramref:`_pool.Pool.echo`,
:paramref:`_pool.Pool.reset_on_return` and others are passed to the
:class:`_pool.Pool` constructor.
"""
Pool.__init__(self, creator, **kw)
self._pool = self._queue_class(pool_size, use_lifo=use_lifo)
self._overflow = 0 - pool_size
self._max_overflow = max_overflow
self._timeout = timeout
self._overflow_lock = threading.Lock()
def _do_return_conn(self, conn):
try:
self._pool.put(conn, False)
except sqla_queue.Full:
try:
conn.close()
finally:
self._dec_overflow()
def _do_get(self):
use_overflow = self._max_overflow > -1
try:
wait = use_overflow and self._overflow >= self._max_overflow
return self._pool.get(wait, self._timeout)
except sqla_queue.Empty:
# don't do things inside of "except Empty", because when we say
# we timed out or can't connect and raise, Python 3 tells
# people the real error is queue.Empty which it isn't.
pass
if use_overflow and self._overflow >= self._max_overflow:
if not wait:
return self._do_get()
else:
raise exc.TimeoutError(
"QueuePool limit of size %d overflow %d reached, "
"connection timed out, timeout %0.2f"
% (self.size(), self.overflow(), self._timeout),
code="3o7r",
)
if self._inc_overflow():
try:
return self._create_connection()
except:
with util.safe_reraise():
self._dec_overflow()
else:
return self._do_get()
def _inc_overflow(self):
if self._max_overflow == -1:
self._overflow += 1
return True
with self._overflow_lock:
if self._overflow < self._max_overflow:
self._overflow += 1
return True
else:
return False
def _dec_overflow(self):
if self._max_overflow == -1:
self._overflow -= 1
return True
with self._overflow_lock:
self._overflow -= 1
return True
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(
self._creator,
pool_size=self._pool.maxsize,
max_overflow=self._max_overflow,
pre_ping=self._pre_ping,
use_lifo=self._pool.use_lifo,
timeout=self._timeout,
recycle=self._recycle,
echo=self.echo,
logging_name=self._orig_logging_name,
reset_on_return=self._reset_on_return,
_dispatch=self.dispatch,
dialect=self._dialect,
)
def dispose(self):
while True:
try:
conn = self._pool.get(False)
conn.close()
except sqla_queue.Empty:
break
self._overflow = 0 - self.size()
self.logger.info("Pool disposed. %s", self.status())
def status(self):
return (
"Pool size: %d Connections in pool: %d "
"Current Overflow: %d Current Checked out "
"connections: %d"
% (
self.size(),
self.checkedin(),
self.overflow(),
self.checkedout(),
)
)
def size(self):
return self._pool.maxsize
def timeout(self):
return self._timeout
def checkedin(self):
return self._pool.qsize()
def overflow(self):
return self._overflow
def checkedout(self):
return self._pool.maxsize - self._pool.qsize() + self._overflow
class AsyncAdaptedQueuePool(QueuePool):
_is_asyncio = True
_queue_class = sqla_queue.AsyncAdaptedQueue
_dialect = _AsyncConnDialect()
class FallbackAsyncAdaptedQueuePool(AsyncAdaptedQueuePool):
_queue_class = sqla_queue.FallbackAsyncAdaptedQueue
class NullPool(Pool):
"""A Pool which does not pool connections.
Instead it literally opens and closes the underlying DB-API connection
per each connection open/close.
Reconnect-related functions such as ``recycle`` and connection
invalidation are not supported by this Pool implementation, since
no connections are held persistently.
"""
def status(self):
return "NullPool"
def _do_return_conn(self, conn):
conn.close()
def _do_get(self):
return self._create_connection()
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(
self._creator,
recycle=self._recycle,
echo=self.echo,
logging_name=self._orig_logging_name,
reset_on_return=self._reset_on_return,
pre_ping=self._pre_ping,
_dispatch=self.dispatch,
dialect=self._dialect,
)
def dispose(self):
pass
class SingletonThreadPool(Pool):
"""A Pool that maintains one connection per thread.
Maintains one connection per each thread, never moving a connection to a
thread other than the one which it was created in.
.. warning:: the :class:`.SingletonThreadPool` will call ``.close()``
on arbitrary connections that exist beyond the size setting of
``pool_size``, e.g. if more unique **thread identities**
than what ``pool_size`` states are used. This cleanup is
non-deterministic and not sensitive to whether or not the connections
linked to those thread identities are currently in use.
:class:`.SingletonThreadPool` may be improved in a future release,
however in its current status it is generally used only for test
scenarios using a SQLite ``:memory:`` database and is not recommended
for production use.
Options are the same as those of :class:`_pool.Pool`, as well as:
:param pool_size: The number of threads in which to maintain connections
at once. Defaults to five.
:class:`.SingletonThreadPool` is used by the SQLite dialect
automatically when a memory-based database is used.
See :ref:`sqlite_toplevel`.
"""
_is_asyncio = False
def __init__(self, creator, pool_size=5, **kw):
Pool.__init__(self, creator, **kw)
self._conn = threading.local()
self._fairy = threading.local()
self._all_conns = set()
self.size = pool_size
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(
self._creator,
pool_size=self.size,
recycle=self._recycle,
echo=self.echo,
pre_ping=self._pre_ping,
logging_name=self._orig_logging_name,
reset_on_return=self._reset_on_return,
_dispatch=self.dispatch,
dialect=self._dialect,
)
def dispose(self):
"""Dispose of this pool."""
for conn in self._all_conns:
try:
conn.close()
except Exception:
# pysqlite won't even let you close a conn from a thread
# that didn't create it
pass
self._all_conns.clear()
def _cleanup(self):
while len(self._all_conns) >= self.size:
c = self._all_conns.pop()
c.close()
def status(self):
return "SingletonThreadPool id:%d size: %d" % (
id(self),
len(self._all_conns),
)
def _do_return_conn(self, conn):
pass
def _do_get(self):
try:
c = self._conn.current()
if c:
return c
except AttributeError:
pass
c = self._create_connection()
self._conn.current = weakref.ref(c)
if len(self._all_conns) >= self.size:
self._cleanup()
self._all_conns.add(c)
return c
def connect(self):
# vendored from Pool to include the now removed use_threadlocal
# behavior
try:
rec = self._fairy.current()
except AttributeError:
pass
else:
if rec is not None:
return rec._checkout_existing()
return _ConnectionFairy._checkout(self, self._fairy)
def _return_conn(self, record):
try:
del self._fairy.current
except AttributeError:
pass
self._do_return_conn(record)
class StaticPool(Pool):
"""A Pool of exactly one connection, used for all requests.
Reconnect-related functions such as ``recycle`` and connection
invalidation (which is also used to support auto-reconnect) are only
partially supported right now and may not yield good results.
"""
@util.memoized_property
def connection(self):
return _ConnectionRecord(self)
def status(self):
return "StaticPool"
def dispose(self):
if (
"connection" in self.__dict__
and self.connection.dbapi_connection is not None
):
self.connection.close()
del self.__dict__["connection"]
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(
creator=self._creator,
recycle=self._recycle,
reset_on_return=self._reset_on_return,
pre_ping=self._pre_ping,
echo=self.echo,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch,
dialect=self._dialect,
)
def _transfer_from(self, other_static_pool):
# used by the test suite to make a new engine / pool without
# losing the state of an existing SQLite :memory: connection
self._invoke_creator = (
lambda crec: other_static_pool.connection.dbapi_connection
)
def _create_connection(self):
raise NotImplementedError()
def _do_return_conn(self, conn):
pass
def _do_get(self):
rec = self.connection
if rec._is_hard_or_soft_invalidated():
del self.__dict__["connection"]
rec = self.connection
return rec
class AssertionPool(Pool):
"""A :class:`_pool.Pool` that allows at most one checked out connection at
any given time.
This will raise an exception if more than one connection is checked out
at a time. Useful for debugging code that is using more connections
than desired.
"""
def __init__(self, *args, **kw):
self._conn = None
self._checked_out = False
self._store_traceback = kw.pop("store_traceback", True)
self._checkout_traceback = None
Pool.__init__(self, *args, **kw)
def status(self):
return "AssertionPool"
def _do_return_conn(self, conn):
if not self._checked_out:
raise AssertionError("connection is not checked out")
self._checked_out = False
assert conn is self._conn
def dispose(self):
self._checked_out = False
if self._conn:
self._conn.close()
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(
self._creator,
echo=self.echo,
pre_ping=self._pre_ping,
recycle=self._recycle,
reset_on_return=self._reset_on_return,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch,
dialect=self._dialect,
)
def _do_get(self):
if self._checked_out:
if self._checkout_traceback:
suffix = " at:\n%s" % "".join(
chop_traceback(self._checkout_traceback)
)
else:
suffix = ""
raise AssertionError("connection is already checked out" + suffix)
if not self._conn:
self._conn = self._create_connection()
self._checked_out = True
if self._store_traceback:
self._checkout_traceback = traceback.format_stack()
return self._conn
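To close, a sketch of the QueuePool sizing semantics documented above (illustrative, not part of this commit; sqlite3 and the small pool parameters are assumptions for the example): at most pool_size + max_overflow connections can be checked out at once, and the next checkout waits for timeout seconds before _do_get() raises TimeoutError:

import sqlite3

from sqlalchemy import exc
from sqlalchemy.pool import QueuePool

pool = QueuePool(
    lambda: sqlite3.connect(":memory:"),
    pool_size=2,
    max_overflow=1,
    timeout=0.1,
)

held = [pool.connect() for _ in range(3)]  # 2 pooled + 1 overflow, all checked out
try:
    pool.connect()                         # 4th checkout waits 0.1s, then fails
except exc.TimeoutError as err:
    print(err)  # "QueuePool limit of size 2 overflow 1 reached, connection timed out..."

held[0].close()         # check one connection back in
again = pool.connect()  # succeeds immediately now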