first add files
This commit is contained in:
175
lib/sqlalchemy/util/__init__.py
Normal file
175
lib/sqlalchemy/util/__init__.py
Normal file
@@ -0,0 +1,175 @@
|
||||
# util/__init__.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
|
||||
from collections import defaultdict
|
||||
from contextlib import contextmanager
|
||||
from functools import partial
|
||||
from functools import update_wrapper
|
||||
|
||||
from ._collections import coerce_generator_arg
|
||||
from ._collections import coerce_to_immutabledict
|
||||
from ._collections import collections_abc
|
||||
from ._collections import column_dict
|
||||
from ._collections import column_set
|
||||
from ._collections import EMPTY_DICT
|
||||
from ._collections import EMPTY_SET
|
||||
from ._collections import FacadeDict
|
||||
from ._collections import flatten_iterator
|
||||
from ._collections import has_dupes
|
||||
from ._collections import has_intersection
|
||||
from ._collections import IdentitySet
|
||||
from ._collections import ImmutableContainer
|
||||
from ._collections import immutabledict
|
||||
from ._collections import ImmutableProperties
|
||||
from ._collections import LRUCache
|
||||
from ._collections import ordered_column_set
|
||||
from ._collections import OrderedDict
|
||||
from ._collections import OrderedIdentitySet
|
||||
from ._collections import OrderedProperties
|
||||
from ._collections import OrderedSet
|
||||
from ._collections import PopulateDict
|
||||
from ._collections import Properties
|
||||
from ._collections import ScopedRegistry
|
||||
from ._collections import sort_dictionary
|
||||
from ._collections import ThreadLocalRegistry
|
||||
from ._collections import to_column_set
|
||||
from ._collections import to_list
|
||||
from ._collections import to_set
|
||||
from ._collections import unique_list
|
||||
from ._collections import UniqueAppender
|
||||
from ._collections import update_copy
|
||||
from ._collections import WeakPopulateDict
|
||||
from ._collections import WeakSequence
|
||||
from ._preloaded import preload_module
|
||||
from ._preloaded import preloaded
|
||||
from .compat import ABC
|
||||
from .compat import arm
|
||||
from .compat import b
|
||||
from .compat import b64decode
|
||||
from .compat import b64encode
|
||||
from .compat import binary_type
|
||||
from .compat import binary_types
|
||||
from .compat import byte_buffer
|
||||
from .compat import callable
|
||||
from .compat import cmp
|
||||
from .compat import cpython
|
||||
from .compat import dataclass_fields
|
||||
from .compat import decode_backslashreplace
|
||||
from .compat import dottedgetter
|
||||
from .compat import has_refcount_gc
|
||||
from .compat import inspect_getfullargspec
|
||||
from .compat import int_types
|
||||
from .compat import iterbytes
|
||||
from .compat import itertools_filter
|
||||
from .compat import itertools_filterfalse
|
||||
from .compat import local_dataclass_fields
|
||||
from .compat import namedtuple
|
||||
from .compat import next
|
||||
from .compat import nullcontext
|
||||
from .compat import osx
|
||||
from .compat import parse_qsl
|
||||
from .compat import perf_counter
|
||||
from .compat import pickle
|
||||
from .compat import print_
|
||||
from .compat import py2k
|
||||
from .compat import py311
|
||||
from .compat import py37
|
||||
from .compat import py38
|
||||
from .compat import py39
|
||||
from .compat import py3k
|
||||
from .compat import pypy
|
||||
from .compat import quote_plus
|
||||
from .compat import raise_
|
||||
from .compat import raise_from_cause
|
||||
from .compat import reduce
|
||||
from .compat import reraise
|
||||
from .compat import string_types
|
||||
from .compat import StringIO
|
||||
from .compat import text_type
|
||||
from .compat import threading
|
||||
from .compat import timezone
|
||||
from .compat import TYPE_CHECKING
|
||||
from .compat import u
|
||||
from .compat import ue
|
||||
from .compat import unquote
|
||||
from .compat import unquote_plus
|
||||
from .compat import win32
|
||||
from .compat import with_metaclass
|
||||
from .compat import zip_longest
|
||||
from .concurrency import asyncio
|
||||
from .concurrency import await_fallback
|
||||
from .concurrency import await_only
|
||||
from .concurrency import greenlet_spawn
|
||||
from .concurrency import is_exit_exception
|
||||
from .deprecations import deprecated
|
||||
from .deprecations import deprecated_20
|
||||
from .deprecations import deprecated_20_cls
|
||||
from .deprecations import deprecated_cls
|
||||
from .deprecations import deprecated_params
|
||||
from .deprecations import inject_docstring_text
|
||||
from .deprecations import moved_20
|
||||
from .deprecations import SQLALCHEMY_WARN_20
|
||||
from .deprecations import warn_deprecated
|
||||
from .deprecations import warn_deprecated_20
|
||||
from .langhelpers import add_parameter_text
|
||||
from .langhelpers import as_interface
|
||||
from .langhelpers import asbool
|
||||
from .langhelpers import asint
|
||||
from .langhelpers import assert_arg_type
|
||||
from .langhelpers import attrsetter
|
||||
from .langhelpers import bool_or_str
|
||||
from .langhelpers import chop_traceback
|
||||
from .langhelpers import class_hierarchy
|
||||
from .langhelpers import classproperty
|
||||
from .langhelpers import clsname_as_plain_name
|
||||
from .langhelpers import coerce_kw_type
|
||||
from .langhelpers import constructor_copy
|
||||
from .langhelpers import constructor_key
|
||||
from .langhelpers import counter
|
||||
from .langhelpers import create_proxy_methods
|
||||
from .langhelpers import decode_slice
|
||||
from .langhelpers import decorator
|
||||
from .langhelpers import dictlike_iteritems
|
||||
from .langhelpers import duck_type_collection
|
||||
from .langhelpers import ellipses_string
|
||||
from .langhelpers import EnsureKWArgType
|
||||
from .langhelpers import format_argspec_init
|
||||
from .langhelpers import format_argspec_plus
|
||||
from .langhelpers import generic_repr
|
||||
from .langhelpers import get_callable_argspec
|
||||
from .langhelpers import get_cls_kwargs
|
||||
from .langhelpers import get_func_kwargs
|
||||
from .langhelpers import getargspec_init
|
||||
from .langhelpers import has_compiled_ext
|
||||
from .langhelpers import HasMemoized
|
||||
from .langhelpers import hybridmethod
|
||||
from .langhelpers import hybridproperty
|
||||
from .langhelpers import iterate_attributes
|
||||
from .langhelpers import map_bits
|
||||
from .langhelpers import md5_hex
|
||||
from .langhelpers import memoized_instancemethod
|
||||
from .langhelpers import memoized_property
|
||||
from .langhelpers import MemoizedSlots
|
||||
from .langhelpers import method_is_overridden
|
||||
from .langhelpers import methods_equivalent
|
||||
from .langhelpers import monkeypatch_proxied_specials
|
||||
from .langhelpers import NoneType
|
||||
from .langhelpers import only_once
|
||||
from .langhelpers import PluginLoader
|
||||
from .langhelpers import portable_instancemethod
|
||||
from .langhelpers import quoted_token_parser
|
||||
from .langhelpers import safe_reraise
|
||||
from .langhelpers import set_creation_order
|
||||
from .langhelpers import string_or_unprintable
|
||||
from .langhelpers import symbol
|
||||
from .langhelpers import unbound_method_to_callable
|
||||
from .langhelpers import walk_subclasses
|
||||
from .langhelpers import warn
|
||||
from .langhelpers import warn_exception
|
||||
from .langhelpers import warn_limited
|
||||
from .langhelpers import wrap_callable
|
||||
1089
lib/sqlalchemy/util/_collections.py
Normal file
1089
lib/sqlalchemy/util/_collections.py
Normal file
File diff suppressed because it is too large
Load Diff
67
lib/sqlalchemy/util/_compat_py3k.py
Normal file
67
lib/sqlalchemy/util/_compat_py3k.py
Normal file
@@ -0,0 +1,67 @@
|
||||
# util/_compat_py3k.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from functools import wraps
|
||||
|
||||
# vendored from py3.7
|
||||
|
||||
|
||||
class _AsyncGeneratorContextManager:
    """Helper for @asynccontextmanager."""

    def __init__(self, func, args, kwds):
        # create the async generator now; it only runs up to its first
        # ``yield`` when __aenter__ drives it with __anext__()
        self.gen = func(*args, **kwds)
        self.func, self.args, self.kwds = func, args, kwds
        # mirror the wrapped function's docstring on the manager instance,
        # falling back to the class docstring
        doc = getattr(func, "__doc__", None)
        if doc is None:
            doc = type(self).__doc__
        self.__doc__ = doc

    async def __aenter__(self):
        """Advance the generator to its first yield and return the value."""
        try:
            return await self.gen.__anext__()
        except StopAsyncIteration:
            # generator returned without yielding: invalid context manager
            raise RuntimeError("generator didn't yield") from None

    async def __aexit__(self, typ, value, traceback):
        """Finish the generator, throwing the active exception into it
        (if any) so the code after ``yield`` sees it."""
        if typ is None:
            # normal exit: the generator must stop after exactly one yield
            try:
                await self.gen.__anext__()
            except StopAsyncIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # exception type given with no instance; instantiate it
                value = typ()
            # See _GeneratorContextManager.__exit__ for comments on subtleties
            # in this implementation
            try:
                await self.gen.athrow(typ, value, traceback)
                raise RuntimeError("generator didn't stop after athrow()")
            except StopAsyncIteration as exc:
                # suppress the exception only if the generator swallowed it
                # (i.e. the StopAsyncIteration is a new one, not ``value``)
                return exc is not value
            except RuntimeError as exc:
                if exc is value:
                    # generator re-raised the same exception; don't suppress
                    return False
                if isinstance(value, (StopIteration, StopAsyncIteration)):
                    # value was implicitly chained into a RuntimeError by
                    # the generator machinery; treat as "not suppressed"
                    if exc.__cause__ is value:
                        return False
                raise
            except BaseException as exc:
                # only re-raise if it's a *different* exception than the one
                # we threw in; otherwise fall through (returns None => not
                # suppressed)
                if exc is not value:
                    raise
|
||||
|
||||
|
||||
# using the vendored version in all cases at the moment to establish
|
||||
# full test coverage
|
||||
def asynccontextmanager(func):
    """Decorate an async generator function so it can be used with
    ``async with``, via the vendored _AsyncGeneratorContextManager."""

    def _make_cm(*args, **kwds):
        return _AsyncGeneratorContextManager(func, args, kwds)

    # apply wraps() as a plain call rather than as a decorator
    return wraps(func)(_make_cm)
|
||||
194
lib/sqlalchemy/util/_concurrency_py3k.py
Normal file
194
lib/sqlalchemy/util/_concurrency_py3k.py
Normal file
@@ -0,0 +1,194 @@
|
||||
# util/_concurrency_py3k.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Coroutine
|
||||
|
||||
import greenlet
|
||||
|
||||
from . import compat
|
||||
from .langhelpers import memoized_property
|
||||
from .. import exc
|
||||
|
||||
# If greenlet.gr_context is present in current version of greenlet,
|
||||
# it will be set with the current context on creation.
|
||||
# Refs: https://github.com/python-greenlet/greenlet/pull/198
|
||||
_has_gr_context = hasattr(greenlet.getcurrent(), "gr_context")
|
||||
|
||||
|
||||
def is_exit_exception(e):
    """Return True if ``e`` represents a "give up / exit" condition:
    any BaseException that is not an Exception, plus asyncio's
    TimeoutError and CancelledError."""
    # note asyncio.CancelledError is already BaseException
    # so was an exit exception in any case
    if isinstance(e, (asyncio.TimeoutError, asyncio.CancelledError)):
        return True
    return not isinstance(e, Exception)
|
||||
|
||||
|
||||
# implementation based on snaury gist at
|
||||
# https://gist.github.com/snaury/202bf4f22c41ca34e56297bae5f33fef
|
||||
# Issue for context: https://github.com/python-greenlet/greenlet/issues/173
|
||||
|
||||
|
||||
class _AsyncIoGreenlet(greenlet.greenlet):
    # Greenlet subclass used by greenlet_spawn(); carries a reference to
    # the "driver" greenlet (the one awaiting us) so that await_only()
    # can switch control back to it.
    def __init__(self, fn, driver):
        greenlet.greenlet.__init__(self, fn, driver)
        # the spawning greenlet; await_only() switches back to it
        self.driver = driver
        if _has_gr_context:
            # copy the driver's contextvars context so code in this
            # greenlet sees the same context as the caller
            self.gr_context = driver.gr_context
|
||||
|
||||
|
||||
def await_only(awaitable: Coroutine) -> Any:
    """Awaits an async function in a sync method.

    The sync method must be inside a :func:`greenlet_spawn` context.
    :func:`await_only` calls cannot be nested.

    :param awaitable: The coroutine to call.

    :raises exc.MissingGreenlet: if not currently running inside an
     :func:`greenlet_spawn`-managed greenlet.

    """
    # this is called in the context greenlet while running fn
    current = greenlet.getcurrent()
    if not isinstance(current, _AsyncIoGreenlet):
        raise exc.MissingGreenlet(
            "greenlet_spawn has not been called; can't call await_only() "
            "here. Was IO attempted in an unexpected place?"
        )

    # returns the control to the driver greenlet passing it
    # a coroutine to run. Once the awaitable is done, the driver greenlet
    # switches back to this greenlet with the result of awaitable that is
    # then returned to the caller (or raised as error)
    return current.driver.switch(awaitable)
|
||||
|
||||
|
||||
def await_fallback(awaitable: Coroutine) -> Any:
    """Awaits an async function in a sync method.

    The sync method must be inside a :func:`greenlet_spawn` context.
    :func:`await_fallback` calls cannot be nested.

    Unlike :func:`await_only`, when called outside a greenlet context this
    falls back to running the awaitable on an event loop via
    ``run_until_complete``, provided no loop is already running.

    :param awaitable: The coroutine to call.

    """
    # this is called in the context greenlet while running fn
    current = greenlet.getcurrent()
    if not isinstance(current, _AsyncIoGreenlet):
        # not inside greenlet_spawn: drive the coroutine ourselves,
        # which is only possible if no event loop is currently running
        loop = get_event_loop()
        if loop.is_running():
            raise exc.MissingGreenlet(
                "greenlet_spawn has not been called and asyncio event "
                "loop is already running; can't call await_fallback() here. "
                "Was IO attempted in an unexpected place?"
            )
        return loop.run_until_complete(awaitable)

    # inside greenlet_spawn: hand the coroutine to the driver greenlet,
    # same as await_only()
    return current.driver.switch(awaitable)
|
||||
|
||||
|
||||
async def greenlet_spawn(
    fn: Callable, *args, _require_await=False, **kwargs
) -> Any:
    """Runs a sync function ``fn`` in a new greenlet.

    The sync function can then use :func:`await_only` to wait for async
    functions.

    :param fn: The sync callable to call.
    :param \\*args: Positional arguments to pass to the ``fn`` callable.
    :param \\*\\*kwargs: Keyword arguments to pass to the ``fn`` callable.
    :param _require_await: if True, raise ``exc.AwaitRequired`` when ``fn``
     completed without ever switching out via :func:`await_only`.
    :return: the return value of ``fn``.
    """

    context = _AsyncIoGreenlet(fn, greenlet.getcurrent())
    # runs the function synchronously in gl greenlet. If the execution
    # is interrupted by await_only, context is not dead and result is a
    # coroutine to wait. If the context is dead the function has
    # returned, and its result can be returned.
    switch_occurred = False
    try:
        result = context.switch(*args, **kwargs)
        while not context.dead:
            switch_occurred = True
            try:
                # wait for a coroutine from await_only and then return its
                # result back to it.
                value = await result
            except BaseException:
                # this allows an exception to be raised within
                # the moderated greenlet so that it can continue
                # its expected flow.
                result = context.throw(*sys.exc_info())
            else:
                result = context.switch(value)
    finally:
        # clean up to avoid cycle resolution by gc
        del context.driver
    if _require_await and not switch_occurred:
        raise exc.AwaitRequired(
            "The current operation required an async execution but none was "
            "detected. This will usually happen when using a non compatible "
            "DBAPI driver. Please ensure that an async DBAPI is used."
        )
    return result
|
||||
|
||||
|
||||
class AsyncAdaptedLock:
    """Adapts :class:`asyncio.Lock` to the synchronous ``with`` protocol
    by acquiring it through :func:`await_fallback`."""

    @memoized_property
    def mutex(self):
        # there should not be a race here for coroutines creating the
        # new lock as we are not using await, so therefore no concurrency
        return asyncio.Lock()

    def __enter__(self):
        # await is used to acquire the lock only after the first calling
        # coroutine has created the mutex.
        await_fallback(self.mutex.acquire())
        return self

    def __exit__(self, *arg, **kw):
        # release unconditionally; exceptions propagate to the caller
        self.mutex.release()
|
||||
|
||||
|
||||
def _util_async_run_coroutine_function(fn, *args, **kwargs):
    """for test suite/ util only"""

    event_loop = get_event_loop()
    # a coroutine can only be driven from the outside when no loop is
    # already running in this thread
    if not event_loop.is_running():
        return event_loop.run_until_complete(fn(*args, **kwargs))
    raise Exception(
        "for async run coroutine we expect that no greenlet or event "
        "loop is running when we start out"
    )
|
||||
|
||||
|
||||
def _util_async_run(fn, *args, **kwargs):
    """for test suite/ util only"""

    loop = get_event_loop()
    if not loop.is_running():
        # no loop running: drive fn inside a fresh greenlet_spawn context
        return loop.run_until_complete(greenlet_spawn(fn, *args, **kwargs))
    else:
        # allow for a wrapped test function to call another
        assert isinstance(greenlet.getcurrent(), _AsyncIoGreenlet)
        return fn(*args, **kwargs)
|
||||
|
||||
|
||||
def get_event_loop():
    """vendor asyncio.get_event_loop() for python 3.7 and above.

    Python 3.10 deprecates get_event_loop() as a standalone.

    """
    if not compat.py37:
        # python 3.6: the plain call still behaves as expected
        return asyncio.get_event_loop()
    try:
        return asyncio.get_running_loop()
    except RuntimeError:
        # no running loop: fetch (or create) one from the policy directly,
        # avoiding the deprecated standalone get_event_loop()
        return asyncio.get_event_loop_policy().get_event_loop()
|
||||
68
lib/sqlalchemy/util/_preloaded.py
Normal file
68
lib/sqlalchemy/util/_preloaded.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# util/_preloaded.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""supplies the "preloaded" registry to resolve circular module imports at
|
||||
runtime.
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
from . import compat
|
||||
|
||||
|
||||
class _ModuleRegistry:
    """Registry of modules to load in a package init file.

    To avoid potential thread safety issues for imports that are deferred
    in a function, like https://bugs.python.org/issue38884, these modules
    are added to the system module cache by importing them after the package
    has finished initialization.

    A global instance is provided under the name :attr:`.preloaded`. Use
    the function :func:`.preload_module` to register modules to load and
    :meth:`.import_prefix` to load all the modules that start with the
    given path.

    While the modules are loaded in the global module cache, it's advisable
    to access them using :attr:`.preloaded` to ensure that it was actually
    registered. Each registered module is added to the instance ``__dict__``
    in the form `<package>_<module>`, omitting ``sqlalchemy`` from the package
    name. Example: ``sqlalchemy.sql.util`` becomes ``preloaded.sql_util``.
    """

    def __init__(self, prefix="sqlalchemy."):
        # fully-qualified module names registered for deferred import
        self.module_registry = set()
        # package prefix stripped when deriving attribute keys
        self.prefix = prefix

    def preload_module(self, *deps):
        """Adds the specified modules to the list to load.

        This method can be used both as a normal function and as a decorator.
        No change is performed to the decorated object.
        """
        self.module_registry.update(deps)
        return lambda fn: fn

    def import_prefix(self, path):
        """Resolve all the modules in the registry that start with the
        specified path.
        """
        for module in self.module_registry:
            if self.prefix:
                # e.g. "sqlalchemy.sql.util" -> key "sql_util"
                key = module.split(self.prefix)[-1].replace(".", "_")
            else:
                key = module
            # skip modules outside the requested path or already resolved
            if (
                not path or module.startswith(path)
            ) and key not in self.__dict__:
                compat.import_(module, globals(), locals())
                self.__dict__[key] = sys.modules[module]
|
||||
|
||||
|
||||
# global registry instance; modules register against it via preload_module
# and the package __init__ resolves them with import_prefix()
preloaded = _ModuleRegistry()
preload_module = preloaded.preload_module
|
||||
632
lib/sqlalchemy/util/compat.py
Normal file
632
lib/sqlalchemy/util/compat.py
Normal file
@@ -0,0 +1,632 @@
|
||||
# util/compat.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Handle Python version/platform incompatibilities."""
|
||||
|
||||
import collections
|
||||
import contextlib
|
||||
import inspect
|
||||
import operator
|
||||
import platform
|
||||
import sys
|
||||
|
||||
py311 = sys.version_info >= (3, 11)
|
||||
py39 = sys.version_info >= (3, 9)
|
||||
py38 = sys.version_info >= (3, 8)
|
||||
py37 = sys.version_info >= (3, 7)
|
||||
py3k = sys.version_info >= (3, 0)
|
||||
py2k = sys.version_info < (3, 0)
|
||||
pypy = platform.python_implementation() == "PyPy"
|
||||
|
||||
|
||||
cpython = platform.python_implementation() == "CPython"
|
||||
win32 = sys.platform.startswith("win")
|
||||
osx = sys.platform.startswith("darwin")
|
||||
arm = "aarch" in platform.machine().lower()
|
||||
|
||||
has_refcount_gc = bool(cpython)
|
||||
|
||||
contextmanager = contextlib.contextmanager
|
||||
dottedgetter = operator.attrgetter
|
||||
namedtuple = collections.namedtuple
|
||||
next = next # noqa
|
||||
|
||||
# mirrors inspect.FullArgSpec; returned by inspect_getfullargspec() below
FullArgSpec = collections.namedtuple(
    "FullArgSpec",
    [
        "args",
        "varargs",
        "varkw",
        "defaults",
        "kwonlyargs",
        "kwonlydefaults",
        "annotations",
    ],
)
|
||||
|
||||
|
||||
class nullcontext(object):
    """Context manager that does no additional processing.

    Vendored from Python 3.7.

    """

    def __init__(self, enter_result=None):
        # stored so __enter__ can hand the value back unchanged
        self.enter_result = enter_result

    def __enter__(self):
        return self.enter_result

    def __exit__(self, *excinfo):
        # never suppresses exceptions
        return None
|
||||
|
||||
|
||||
try:
|
||||
import threading
|
||||
except ImportError:
|
||||
import dummy_threading as threading # noqa
|
||||
|
||||
|
||||
def inspect_getfullargspec(func):
    """Fully vendored version of getfullargspec from Python 3.3."""

    if inspect.ismethod(func):
        # unwrap a bound method down to the underlying function
        func = func.__func__
    if not inspect.isfunction(func):
        raise TypeError("{!r} is not a Python function".format(func))

    co = func.__code__
    if not inspect.iscode(co):
        raise TypeError("{!r} is not a code object".format(co))

    # co_varnames lists positional args first, then keyword-only args,
    # then the *args / **kwargs names if present
    nargs = co.co_argcount
    names = co.co_varnames
    nkwargs = co.co_kwonlyargcount if py3k else 0
    args = list(names[:nargs])
    kwonlyargs = list(names[nargs : nargs + nkwargs])

    nargs += nkwargs
    varargs = None
    if co.co_flags & inspect.CO_VARARGS:
        varargs = co.co_varnames[nargs]
        nargs = nargs + 1
    varkw = None
    if co.co_flags & inspect.CO_VARKEYWORDS:
        varkw = co.co_varnames[nargs]

    return FullArgSpec(
        args,
        varargs,
        varkw,
        func.__defaults__,
        kwonlyargs,
        func.__kwdefaults__ if py3k else None,
        func.__annotations__ if py3k else {},
    )
|
||||
|
||||
|
||||
if py38:
|
||||
from importlib import metadata as importlib_metadata
|
||||
else:
|
||||
import importlib_metadata # noqa
|
||||
|
||||
|
||||
def importlib_metadata_get(group):
    """Return the entry points for ``group``, working with both the
    modern ``EntryPoints.select()`` API and the legacy dict-style API."""
    entry_points = importlib_metadata.entry_points()
    try:
        selector = entry_points.select
    except AttributeError:
        # older importlib.metadata: entry_points() returned a dict
        return entry_points.get(group, ())
    return selector(group=group)
|
||||
|
||||
|
||||
if py3k:
|
||||
import base64
|
||||
import builtins
|
||||
import configparser
|
||||
import itertools
|
||||
import pickle
|
||||
|
||||
from functools import reduce
|
||||
from io import BytesIO as byte_buffer
|
||||
from io import StringIO
|
||||
from itertools import zip_longest
|
||||
from time import perf_counter
|
||||
from urllib.parse import (
|
||||
quote_plus,
|
||||
unquote_plus,
|
||||
parse_qsl,
|
||||
quote,
|
||||
unquote,
|
||||
)
|
||||
|
||||
string_types = (str,)
|
||||
binary_types = (bytes,)
|
||||
binary_type = bytes
|
||||
text_type = str
|
||||
int_types = (int,)
|
||||
iterbytes = iter
|
||||
long_type = int
|
||||
|
||||
itertools_filterfalse = itertools.filterfalse
|
||||
itertools_filter = filter
|
||||
itertools_imap = map
|
||||
|
||||
exec_ = getattr(builtins, "exec")
|
||||
import_ = getattr(builtins, "__import__")
|
||||
print_ = getattr(builtins, "print")
|
||||
|
||||
def b(s):
    """Encode a text string to latin-1 bytes."""
    return s.encode("latin-1")


def b64decode(x):
    """Base64-decode a string, returning bytes."""
    return base64.b64decode(x.encode("ascii"))


def b64encode(x):
    """Base64-encode bytes, returning an ascii string."""
    return base64.b64encode(x).decode("ascii")


def decode_backslashreplace(text, encoding):
    """Decode bytes, rendering undecodable bytes as backslash escapes."""
    return text.decode(encoding, errors="backslashreplace")


def cmp(a, b):
    """Python 2 style three-way compare: -1, 0 or 1."""
    return (a > b) - (a < b)
|
||||
|
||||
def raise_(
    exception, with_traceback=None, replace_context=None, from_=False
):
    r"""implement "raise" with cause support.

    :param exception: exception to raise
    :param with_traceback: will call exception.with_traceback()
    :param replace_context: an as-yet-unsupported feature. This is
     an exception object which we are "replacing", e.g., it's our
     "cause" but we don't want it printed. Basically just what
     ``__suppress_context__`` does but we don't want to suppress
     the enclosing context, if any. So for now we make it the
     cause.
    :param from\_: the cause. this actually sets the cause and doesn't
     hope to hide it someday.

    """
    if with_traceback is not None:
        exception = exception.with_traceback(with_traceback)

    if from_ is not False:
        exception.__cause__ = from_
    elif replace_context is not None:
        # no good solution here, we would like to have the exception
        # have only the context of replace_context.__context__ so that the
        # intermediary exception does not change, but we can't figure
        # that out.
        exception.__cause__ = replace_context

    try:
        raise exception
    finally:
        # credit to
        # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
        # as the __traceback__ object creates a cycle
        del exception, replace_context, from_, with_traceback
|
||||
|
||||
def u(s):
    """No-op on Python 3: str is already unicode."""
    return s


def ue(s):
    """No-op on Python 3: str is already unicode."""
    return s
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
# Unused. Kept for backwards compatibility.
|
||||
callable = callable # noqa
|
||||
|
||||
from abc import ABC
|
||||
|
||||
def _qualname(fn):
    """Return the dotted qualified name of a function (py3 native)."""
    return fn.__qualname__
|
||||
|
||||
|
||||
else:
|
||||
import base64
|
||||
import ConfigParser as configparser # noqa
|
||||
import itertools
|
||||
|
||||
from StringIO import StringIO # noqa
|
||||
from cStringIO import StringIO as byte_buffer # noqa
|
||||
from itertools import izip_longest as zip_longest # noqa
|
||||
from time import clock as perf_counter # noqa
|
||||
from urllib import quote # noqa
|
||||
from urllib import quote_plus # noqa
|
||||
from urllib import unquote # noqa
|
||||
from urllib import unquote_plus # noqa
|
||||
from urlparse import parse_qsl # noqa
|
||||
|
||||
from abc import ABCMeta
|
||||
|
||||
class ABC(object):
|
||||
__metaclass__ = ABCMeta
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle # noqa
|
||||
|
||||
string_types = (basestring,) # noqa
|
||||
binary_types = (bytes,)
|
||||
binary_type = str
|
||||
text_type = unicode # noqa
|
||||
int_types = int, long # noqa
|
||||
long_type = long # noqa
|
||||
|
||||
callable = callable # noqa
|
||||
cmp = cmp # noqa
|
||||
reduce = reduce # noqa
|
||||
|
||||
b64encode = base64.b64encode
|
||||
b64decode = base64.b64decode
|
||||
|
||||
itertools_filterfalse = itertools.ifilterfalse
|
||||
itertools_filter = itertools.ifilter
|
||||
itertools_imap = itertools.imap
|
||||
|
||||
def b(s):
    # Python 2: str is already a bytestring; return unchanged
    return s


def exec_(func_text, globals_, lcl=None):
    # Python 2 emulation of the py3 exec() function signature, built on
    # the py2 exec statement
    if lcl is None:
        exec("exec func_text in globals_")
    else:
        exec("exec func_text in globals_, lcl")


def iterbytes(buf):
    # yield each byte of a py2 str as an int, matching py3 bytes iteration
    return (ord(byte) for byte in buf)


def import_(*args):
    # py2 __import__ rejects unicode names in the fromlist; coerce to str
    if len(args) == 4:
        args = args[0:3] + ([str(arg) for arg in args[3]],)
    return __import__(*args)
|
||||
|
||||
def print_(*args, **kwargs):
    """Python 2 stand-in for the ``print()`` builtin.

    Writes each positional argument to the ``file`` keyword argument
    (default ``sys.stdout``); non-string arguments are converted with
    ``str()``. A ``file`` of None is a no-op.
    """
    fp = kwargs.pop("file", sys.stdout)
    if fp is None:
        return
    # bug fix: the original looped ``for arg in enumerate(args)``, which
    # yields (index, value) tuples — every argument was therefore written
    # as the str() of a tuple, e.g. "(0, 'x')" instead of "x".
    for arg in args:
        if not isinstance(arg, basestring):  # noqa
            arg = str(arg)
        fp.write(arg)
|
||||
|
||||
def u(s):
    """Decode a utf-8 source literal to unicode (py2)."""
    # this differs from what six does, which doesn't support non-ASCII
    # strings - we only use u() with
    # literal source strings, and all our source files with non-ascii
    # in them (all are tests) are utf-8 encoded.
    return unicode(s, "utf-8")  # noqa


def ue(s):
    """Decode a unicode-escape source literal to unicode (py2)."""
    return unicode(s, "unicode_escape")  # noqa


def decode_backslashreplace(text, encoding):
    """Decode ``text``; on failure fall back to a repr-based escaping."""
    try:
        return text.decode(encoding)
    except UnicodeDecodeError:
        # regular "backslashreplace" for an incompatible encoding raises:
        # "TypeError: don't know how to handle UnicodeDecodeError in
        # error callback"
        return repr(text)[1:-1].decode()
|
||||
|
||||
def safe_bytestring(text):
    # py2k only
    # return an ascii bytestring rendering of ``text``, backslash-escaping
    # non-ascii characters instead of raising
    if not isinstance(text, string_types):
        return unicode(text).encode(  # noqa: F821
            "ascii", errors="backslashreplace"
        )
    elif isinstance(text, unicode):  # noqa: F821
        return text.encode("ascii", errors="backslashreplace")
    else:
        # already a py2 str (bytes); pass through unchanged
        return text
|
||||
|
||||
exec(
|
||||
"def raise_(exception, with_traceback=None, replace_context=None, "
|
||||
"from_=False):\n"
|
||||
" if with_traceback:\n"
|
||||
" raise type(exception), exception, with_traceback\n"
|
||||
" else:\n"
|
||||
" raise exception\n"
|
||||
)
|
||||
|
||||
TYPE_CHECKING = False
|
||||
|
||||
def _qualname(meth):
    """return __qualname__ equivalent for a method on a class"""

    # walk the MRO to find the class that actually defines the method
    for cls in meth.im_class.__mro__:
        if meth.__name__ in cls.__dict__:
            break
    else:
        # not found in any __dict__; fall back to the bare name
        return meth.__name__

    return "%s.%s" % (cls.__name__, meth.__name__)
|
||||
|
||||
|
||||
if py3k:
|
||||
|
||||
def _formatannotation(annotation, base_module=None):
|
||||
"""vendored from python 3.7"""
|
||||
|
||||
if getattr(annotation, "__module__", None) == "typing":
|
||||
return repr(annotation).replace("typing.", "")
|
||||
if isinstance(annotation, type):
|
||||
if annotation.__module__ in ("builtins", base_module):
|
||||
return annotation.__qualname__
|
||||
return annotation.__module__ + "." + annotation.__qualname__
|
||||
return repr(annotation)
|
||||
|
||||
def inspect_formatargspec(
    args,
    varargs=None,
    varkw=None,
    defaults=None,
    kwonlyargs=(),
    kwonlydefaults=None,
    annotations=None,
    formatarg=str,
    formatvarargs=lambda name: "*" + name,
    formatvarkw=lambda name: "**" + name,
    formatvalue=lambda value: "=" + repr(value),
    formatreturns=lambda text: " -> " + text,
    formatannotation=_formatannotation,
):
    """Copy formatargspec from python 3.7 standard library.

    Python 3 has deprecated formatargspec and requested that Signature
    be used instead, however this requires a full reimplementation
    of formatargspec() in terms of creating Parameter objects and such.
    Instead of introducing all the object-creation overhead and having
    to reinvent from scratch, just copy their compatibility routine.

    Ultimately we would need to rewrite our "decorator" routine completely
    which is not really worth it right now, until all Python 2.x support
    is dropped.

    """

    # fix: ``kwonlydefaults`` and ``annotations`` previously used mutable
    # ``{}`` defaults (shared-mutable-default pitfall). ``None`` defaults
    # are normalized here exactly as any falsy value always was, so the
    # behavior is unchanged.
    kwonlydefaults = kwonlydefaults or {}
    annotations = annotations or {}

    def formatargandannotation(arg):
        # render "name" or "name: annotation"
        result = formatarg(arg)
        if arg in annotations:
            result += ": " + formatannotation(annotations[arg])
        return result

    specs = []
    if defaults:
        # defaults align with the trailing portion of ``args``
        firstdefault = len(args) - len(defaults)
    for i, arg in enumerate(args):
        spec = formatargandannotation(arg)
        if defaults and i >= firstdefault:
            spec = spec + formatvalue(defaults[i - firstdefault])
        specs.append(spec)

    if varargs is not None:
        specs.append(formatvarargs(formatargandannotation(varargs)))
    else:
        if kwonlyargs:
            # bare "*" marker introduces keyword-only args with no *args
            specs.append("*")

    if kwonlyargs:
        for kwonlyarg in kwonlyargs:
            spec = formatargandannotation(kwonlyarg)
            if kwonlydefaults and kwonlyarg in kwonlydefaults:
                spec += formatvalue(kwonlydefaults[kwonlyarg])
            specs.append(spec)

    if varkw is not None:
        specs.append(formatvarkw(formatargandannotation(varkw)))

    result = "(" + ", ".join(specs) + ")"
    if "return" in annotations:
        result += formatreturns(formatannotation(annotations["return"]))
    return result
|
||||
|
||||
|
||||
else:
|
||||
from inspect import formatargspec as _inspect_formatargspec
|
||||
|
||||
def inspect_formatargspec(*spec, **kw):
|
||||
# convert for a potential FullArgSpec from compat.getfullargspec()
|
||||
return _inspect_formatargspec(*spec[0:4], **kw) # noqa
|
||||
|
||||
|
||||
# Fix deprecation of accessing ABCs straight from collections module
|
||||
# (which will stop working in 3.8).
|
||||
if py3k:
|
||||
import collections.abc as collections_abc
|
||||
else:
|
||||
import collections as collections_abc # noqa
|
||||
|
||||
|
||||
if py37:
    import dataclasses

    def dataclass_fields(cls):
        """Return a sequence of all dataclasses.Field objects associated
        with a class."""

        if dataclasses.is_dataclass(cls):
            return dataclasses.fields(cls)
        return []

    def local_dataclass_fields(cls):
        """Return a sequence of all dataclasses.Field objects associated with
        a class, excluding those that originate from a superclass."""

        if not dataclasses.is_dataclass(cls):
            return []
        # collect fields contributed by direct bases so they can be
        # excluded from this class's own field list
        inherited = set()
        for base in cls.__bases__:
            inherited.update(dataclass_fields(base))
        return [f for f in dataclasses.fields(cls) if f not in inherited]


else:
    # dataclasses don't exist before Python 3.7; report no fields
    def dataclass_fields(cls):
        return []

    def local_dataclass_fields(cls):
        return []
|
||||
|
||||
|
||||
def raise_from_cause(exception, exc_info=None):
    r"""legacy. use raise\_()"""

    if exc_info is None:
        exc_info = sys.exc_info()
    _, exc_value, exc_tb = exc_info
    # chain to the exception currently being handled, unless it is the
    # very exception we are about to raise
    cause = None if exc_value is exception else exc_value
    reraise(type(exception), exception, tb=exc_tb, cause=cause)
|
||||
|
||||
|
||||
def reraise(tp, value, tb=None, cause=None):
    r"""legacy. use raise\_()"""

    # the explicit type argument ``tp`` is accepted for backwards
    # compatibility but raise_() derives the type from ``value``
    raise_(value, with_traceback=tb, from_=cause)
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases, **kw):
    """Create a base class with a metaclass.

    Drops the middle class upon creation.

    Source: https://lucumr.pocoo.org/2013/5/21/porting-to-python-3-redux/

    """

    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__

        def __new__(cls, name, this_bases, d):
            if this_bases is None:
                # first pass: create the throwaway intermediate class
                new_cls = type.__new__(cls, name, (), d)
            else:
                # second pass (actual subclassing): build the real class
                # using the requested metaclass and the original bases
                new_cls = meta(name, bases, d)

            if hasattr(new_cls, "__init_subclass__") and hasattr(
                new_cls.__init_subclass__, "__func__"
            ):
                new_cls.__init_subclass__.__func__(new_cls, **kw)
            return new_cls

    return metaclass("temporary_class", None, {})
|
||||
|
||||
|
||||
if py3k:
    from datetime import timezone
else:
    from datetime import datetime
    from datetime import timedelta
    from datetime import tzinfo

    class timezone(tzinfo):
        """Minimal port of python 3 timezone object"""

        # single fixed offset from UTC; no per-instance __dict__
        __slots__ = "_offset"

        def __init__(self, offset):
            if not isinstance(offset, timedelta):
                raise TypeError("offset must be a timedelta")
            if not self._minoffset <= offset <= self._maxoffset:
                raise ValueError(
                    "offset must be a timedelta "
                    "strictly between -timedelta(hours=24) and "
                    "timedelta(hours=24)."
                )
            self._offset = offset

        def __eq__(self, other):
            # only exact timezone instances compare equal
            return type(other) == timezone and self._offset == other._offset

        def __hash__(self):
            return hash(self._offset)

        def __repr__(self):
            return "sqlalchemy.util.%s(%r)" % (
                self.__class__.__name__,
                self._offset,
            )

        def __str__(self):
            return self.tzname(None)

        def utcoffset(self, dt):
            return self._offset

        def tzname(self, dt):
            return self._name_from_offset(self._offset)

        def dst(self, dt):
            # fixed-offset zones have no daylight saving component
            return None

        def fromutc(self, dt):
            if isinstance(dt, datetime):
                if dt.tzinfo is not self:
                    raise ValueError("fromutc: dt.tzinfo " "is not self")
                return dt + self._offset
            raise TypeError(
                "fromutc() argument must be a datetime instance" " or None"
            )

        @staticmethod
        def _timedelta_to_microseconds(timedelta):
            """backport of timedelta._to_microseconds()"""
            return (
                timedelta.days * (24 * 3600) + timedelta.seconds
            ) * 1000000 + timedelta.microseconds

        @staticmethod
        def _divmod_timedeltas(a, b):
            """backport of timedelta.__divmod__"""

            q, r = divmod(
                timezone._timedelta_to_microseconds(a),
                timezone._timedelta_to_microseconds(b),
            )
            return q, timedelta(0, 0, r)

        @staticmethod
        def _name_from_offset(delta):
            # render e.g. "UTC+05:30", with seconds/microseconds only
            # when the offset isn't a whole number of minutes
            if not delta:
                return "UTC"
            if delta < timedelta(0):
                sign = "-"
                delta = -delta
            else:
                sign = "+"
            hours, rest = timezone._divmod_timedeltas(
                delta, timedelta(hours=1)
            )
            minutes, rest = timezone._divmod_timedeltas(
                rest, timedelta(minutes=1)
            )
            result = "UTC%s%02d:%02d" % (sign, hours, minutes)
            if rest.seconds:
                result += ":%02d" % (rest.seconds,)
            if rest.microseconds:
                result += ".%06d" % (rest.microseconds,)
            return result

        # offsets must lie strictly inside +/- 24 hours
        _maxoffset = timedelta(hours=23, minutes=59)
        _minoffset = -_maxoffset

    timezone.utc = timezone(timedelta(0))
|
||||
73
lib/sqlalchemy/util/concurrency.py
Normal file
73
lib/sqlalchemy/util/concurrency.py
Normal file
@@ -0,0 +1,73 @@
|
||||
# util/concurrency.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from . import compat
|
||||
|
||||
# detect optional greenlet support; greenlet (and the asyncio
# integration built on it) is only attempted on Python 3
have_greenlet = False
greenlet_error = None

if compat.py3k:
    try:
        import greenlet  # noqa: F401
    except ImportError as e:
        # remember why the import failed so error messages can include it
        greenlet_error = str(e)
    else:
        have_greenlet = True
        from ._concurrency_py3k import await_only
        from ._concurrency_py3k import await_fallback
        from ._concurrency_py3k import greenlet_spawn
        from ._concurrency_py3k import is_exit_exception
        from ._concurrency_py3k import AsyncAdaptedLock
        from ._concurrency_py3k import _util_async_run  # noqa: F401
        from ._concurrency_py3k import (
            _util_async_run_coroutine_function,
        )  # noqa: F401, E501
        from ._concurrency_py3k import asyncio  # noqa: F401

    # does not need greenlet, just Python 3
    from ._compat_py3k import asynccontextmanager  # noqa: F401
|
||||
|
||||
if not have_greenlet:

    asyncio = None  # noqa: F811

    def _not_implemented():
        """Raise a descriptive error for async APIs that are unavailable."""
        # this conditional is to prevent pylance from considering
        # greenlet_spawn() etc as "no return" and dimming out code below it
        if have_greenlet:
            return None

        if not compat.py3k:
            raise ValueError("Cannot use this function in py2.")
        else:
            # bugfix: the original wrote
            #   raise ValueError("..." % greenlet_error
            #                    if greenlet_error else "")
            # where the conditional bound around the *entire* formatted
            # string, producing ValueError("") whenever greenlet_error
            # was unset.  Append the import error only when present.
            msg = "the greenlet library is required to use this function."
            if greenlet_error:
                msg += " %s" % greenlet_error
            raise ValueError(msg)

    def is_exit_exception(e):  # noqa: F811
        # anything not derived from Exception (SystemExit,
        # KeyboardInterrupt, GeneratorExit) is an interpreter-exit signal
        return not isinstance(e, Exception)

    def await_only(thing):  # noqa: F811
        _not_implemented()

    def await_fallback(thing):  # noqa: F811
        # without greenlet there is nothing to await; pass through
        return thing

    def greenlet_spawn(fn, *args, **kw):  # noqa: F811
        _not_implemented()

    def AsyncAdaptedLock(*args, **kw):  # noqa: F811
        _not_implemented()

    def _util_async_run(fn, *arg, **kw):  # noqa: F811
        # synchronous fallback: just call the function directly
        return fn(*arg, **kw)

    def _util_async_run_coroutine_function(fn, *arg, **kw):  # noqa: F811
        _not_implemented()
|
||||
417
lib/sqlalchemy/util/deprecations.py
Normal file
417
lib/sqlalchemy/util/deprecations.py
Normal file
@@ -0,0 +1,417 @@
|
||||
# util/deprecations.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Helpers related to deprecation of functions, methods, classes, other
|
||||
functionality."""
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from . import compat
|
||||
from .langhelpers import _hash_limit_string
|
||||
from .langhelpers import _warnings_warn
|
||||
from .langhelpers import decorator
|
||||
from .langhelpers import inject_docstring_text
|
||||
from .langhelpers import inject_param_text
|
||||
from .. import exc
|
||||
|
||||
|
||||
# opt-in flag: emit SQLAlchemy 2.0 deprecation warnings only when the
# SQLALCHEMY_WARN_20 environment variable is set to a truthy value
SQLALCHEMY_WARN_20 = (
    os.getenv("SQLALCHEMY_WARN_20", "false").lower() in ("true", "yes", "1")
)
|
||||
|
||||
|
||||
def _warn_with_version(msg, version, type_, stacklevel, code=None):
    """Emit a warning of class ``type_``, tagging it with ``version``.

    Warnings deriving from Base20DeprecationWarning are suppressed
    entirely unless the SQLALCHEMY_WARN_20 flag was set at import time.
    """
    if not SQLALCHEMY_WARN_20 and issubclass(
        type_, exc.Base20DeprecationWarning
    ):
        return

    warning = type_(msg, code=code)
    warning.deprecated_since = version

    # +1 so the reported location is the caller of *our* caller
    _warnings_warn(warning, stacklevel=stacklevel + 1)
|
||||
|
||||
|
||||
def warn_deprecated(msg, version, stacklevel=3, code=None):
    """Emit a plain SADeprecationWarning for ``msg``."""
    _warn_with_version(
        msg, version, exc.SADeprecationWarning, stacklevel, code=code
    )
|
||||
|
||||
|
||||
def warn_deprecated_limited(msg, args, version, stacklevel=3, code=None):
    """Issue a deprecation warning with a parameterized string,
    limiting the number of registrations.

    """
    if args:
        # hash-bucket the interpolated message so repeated calls with
        # varying args don't register unbounded unique warning strings
        msg = _hash_limit_string(msg, 10, args)
    _warn_with_version(
        msg, version, exc.SADeprecationWarning, stacklevel, code=code
    )
|
||||
|
||||
|
||||
def warn_deprecated_20(msg, stacklevel=3, code=None):
    """Emit a RemovedIn20Warning, using its own declared version tag."""
    _warn_with_version(
        msg,
        exc.RemovedIn20Warning.deprecated_since,
        exc.RemovedIn20Warning,
        stacklevel,
        code=code,
    )
|
||||
|
||||
|
||||
def deprecated_cls(version, message, constructor="__init__"):
    """Class decorator which emits a deprecation warning on construction."""
    header = ".. deprecated:: %s %s" % (version, (message or ""))

    def decorate(cls):
        return _decorate_cls_with_warning(
            cls,
            constructor,
            exc.SADeprecationWarning,
            message % dict(func=constructor),
            version,
            header,
        )

    return decorate
|
||||
|
||||
|
||||
def deprecated_20_cls(
    clsname, alternative=None, constructor="__init__", becomes_legacy=False
):
    """Class decorator marking a class as legacy/removed in SQLAlchemy 2.0."""
    fate = (
        "becomes a legacy construct" if becomes_legacy else "will be removed"
    )
    message = (
        ".. deprecated:: 1.4 The %s class is considered legacy as of the "
        "1.x series of SQLAlchemy and %s in 2.0." % (clsname, fate)
    )

    if alternative:
        message += " " + alternative

    # "legacy" constructs keep working in 2.0 and warn differently from
    # constructs that are removed outright
    warning_cls = (
        exc.LegacyAPIWarning if becomes_legacy else exc.RemovedIn20Warning
    )

    def decorate(cls):
        return _decorate_cls_with_warning(
            cls,
            constructor,
            warning_cls,
            message,
            warning_cls.deprecated_since,
            message,
        )

    return decorate
|
||||
|
||||
|
||||
def deprecated(
    version,
    message=None,
    add_deprecation_to_docstring=True,
    warning=None,
    enable_warnings=True,
):
    """Decorates a function and issues a deprecation warning on use.

    :param version:
      Issue version in the warning.

    :param message:
      If provided, issue message in the warning.  A sensible default
      is used if not provided.

    :param add_deprecation_to_docstring:
      Default True.  If False, the wrapped function's __doc__ is left
      as-is.  If True, the 'message' is prepended to the docs if
      provided, or sensible default if message is omitted.

    """

    # nothing is deprecated "since" 2.0 at this time.  All "removed in 2.0"
    # should emit the RemovedIn20Warning, but messaging should be expressed
    # in terms of "deprecated since 1.4".
    if version == "2.0":
        if warning is None:
            warning = exc.RemovedIn20Warning
        version = "1.4"

    header = (
        ".. deprecated:: %s %s" % (version, (message or ""))
        if add_deprecation_to_docstring
        else None
    )

    if message is None:
        message = "Call to deprecated function %(func)s"

    if warning is None:
        warning = exc.SADeprecationWarning

    if warning is not exc.RemovedIn20Warning:
        message += " (deprecated since: %s)" % version

    def decorate(fn):
        return _decorate_with_warning(
            fn,
            warning,
            message % dict(func=fn.__name__),
            version,
            header,
            enable_warnings=enable_warnings,
        )

    return decorate
|
||||
|
||||
|
||||
def moved_20(message, **kw):
    """Shorthand for marking an API as moved in SQLAlchemy 2.0."""
    return deprecated(
        "2.0", message=message, warning=exc.MovedIn20Warning, **kw
    )
|
||||
|
||||
|
||||
def deprecated_20(api_name, alternative=None, becomes_legacy=False, **kw):
    """Decorator marking an API element as legacy/removed in 2.0.

    ``api_name`` may be a Sphinx role like ``:meth:`.Session.execute``` so
    the generated message can name the kind of element being deprecated.
    """
    type_reg = re.match("^:(attr|func|meth):", api_name)
    if type_reg:
        type_ = {"attr": "attribute", "func": "function", "meth": "method"}[
            type_reg.group(1)
        ]
    else:
        type_ = "construct"

    fate = (
        "becomes a legacy construct" if becomes_legacy else "will be removed"
    )
    message = (
        "The %s %s is considered legacy as of the "
        "1.x series of SQLAlchemy and %s in 2.0." % (api_name, type_, fate)
    )

    if ":attr:" in api_name:
        # a deprecated attribute warns on every read access; require the
        # caller to explicitly acknowledge that or disable warnings
        attribute_ok = kw.pop("warn_on_attribute_access", False)
        if not attribute_ok:
            assert kw.get("enable_warnings") is False, (
                "attribute %s will emit a warning on read access.  "
                "If you *really* want this, "
                "add warn_on_attribute_access=True.  Otherwise please add "
                "enable_warnings=False." % api_name
            )

    if alternative:
        message += " " + alternative

    warning_cls = (
        exc.LegacyAPIWarning if becomes_legacy else exc.RemovedIn20Warning
    )

    return deprecated("2.0", message=message, warning=warning_cls, **kw)
|
||||
|
||||
|
||||
def deprecated_params(**specs):
    """Decorates a function to warn on use of certain parameters.

    e.g. ::

        @deprecated_params(
            weak_identity_map=(
                "0.7",
                "the :paramref:`.Session.weak_identity_map parameter "
                "is deprecated."
            )

        )

    """

    messages = {}
    versions = {}
    version_warnings = {}

    for param, (version, message) in specs.items():
        versions[param] = version
        messages[param] = _sanitize_restructured_text(message)
        version_warnings[param] = (
            exc.RemovedIn20Warning
            if version == "2.0"
            else exc.SADeprecationWarning
        )

    def decorate(fn):
        spec = compat.inspect_getfullargspec(fn)

        # split the deprecated names into: positional params that carry
        # defaults, vs. names expected only through **kwargs
        if spec.defaults is not None:
            defaults = dict(
                zip(
                    spec.args[(len(spec.args) - len(spec.defaults)) :],
                    spec.defaults,
                )
            )
            check_defaults = set(defaults).intersection(messages)
            check_kw = set(messages).difference(defaults)
        else:
            check_defaults = ()
            check_kw = set(messages)

        # name of the **kwargs parameter itself, if the whole catch-all
        # was declared deprecated
        check_any_kw = spec.varkw

        @decorator
        def warned(fn, *args, **kwargs):
            for m in check_defaults:
                # warn only when the caller passed something other than
                # the declared default
                if (defaults[m] is None and kwargs[m] is not None) or (
                    defaults[m] is not None and kwargs[m] != defaults[m]
                ):
                    _warn_with_version(
                        messages[m],
                        versions[m],
                        version_warnings[m],
                        stacklevel=3,
                    )

            if check_any_kw in messages and set(kwargs).difference(
                check_defaults
            ):

                _warn_with_version(
                    messages[check_any_kw],
                    versions[check_any_kw],
                    version_warnings[check_any_kw],
                    stacklevel=3,
                )

            for m in check_kw:
                if m in kwargs:
                    _warn_with_version(
                        messages[m],
                        versions[m],
                        version_warnings[m],
                        stacklevel=3,
                    )
            return fn(*args, **kwargs)

        doc = fn.__doc__ is not None and fn.__doc__ or ""
        if doc:
            # splice a ".. deprecated::" note into each parameter's docs
            doc = inject_param_text(
                doc,
                {
                    param: ".. deprecated:: %s %s"
                    % ("1.4" if version == "2.0" else version, (message or ""))
                    for param, (version, message) in specs.items()
                },
            )
        decorated = warned(fn)
        decorated.__doc__ = doc
        return decorated

    return decorate
|
||||
|
||||
|
||||
def _sanitize_restructured_text(text):
|
||||
def repl(m):
|
||||
type_, name = m.group(1, 2)
|
||||
if type_ in ("func", "meth"):
|
||||
name += "()"
|
||||
return name
|
||||
|
||||
text = re.sub(r":ref:`(.+) <.*>`", lambda m: '"%s"' % m.group(1), text)
|
||||
return re.sub(r"\:(\w+)\:`~?(?:_\w+)?\.?(.+?)`", repl, text)
|
||||
|
||||
|
||||
def _decorate_cls_with_warning(
    cls, constructor, wtype, message, version, docstring_header=None
):
    """Wrap a class (via its constructor) with a deprecation warning and
    splice the deprecation note into the class docstring."""
    doc = cls.__doc__ is not None and cls.__doc__ or ""
    if docstring_header is not None:

        if constructor is not None:
            docstring_header %= dict(func=constructor)

        if issubclass(wtype, exc.Base20DeprecationWarning):
            docstring_header += (
                " (Background on SQLAlchemy 2.0 at: "
                ":ref:`migration_20_toplevel`)"
            )
        doc = inject_docstring_text(doc, docstring_header, 1)

    if type(cls) is type:
        # plain classes: rebuild the class so __doc__ can be set even
        # when the original class is slotted/immutable
        clsdict = dict(cls.__dict__)
        clsdict["__doc__"] = doc
        clsdict.pop("__dict__", None)
        clsdict.pop("__weakref__", None)
        cls = type(cls.__name__, cls.__bases__, clsdict)
        if constructor is not None:
            constructor_fn = clsdict[constructor]

    else:
        # custom metaclass: mutate in place
        cls.__doc__ = doc
        if constructor is not None:
            constructor_fn = getattr(cls, constructor)

    if constructor is not None:
        setattr(
            cls,
            constructor,
            _decorate_with_warning(
                constructor_fn, wtype, message, version, None
            ),
        )
    return cls
|
||||
|
||||
|
||||
def _decorate_with_warning(
    func, wtype, message, version, docstring_header=None, enable_warnings=True
):
    """Wrap a function with a warnings.warn and augmented docstring."""

    message = _sanitize_restructured_text(message)

    if issubclass(wtype, exc.Base20DeprecationWarning):
        doc_only = (
            " (Background on SQLAlchemy 2.0 at: "
            ":ref:`migration_20_toplevel`)"
        )
    else:
        doc_only = ""

    @decorator
    def warned(fn, *args, **kwargs):
        # internal callers can suppress the warning per-call via the
        # private _sa_skip_warning keyword
        skip_warning = not enable_warnings or kwargs.pop(
            "_sa_skip_warning", False
        )
        if not skip_warning:
            _warn_with_version(message, version, wtype, stacklevel=3)
        return fn(*args, **kwargs)

    doc = func.__doc__ is not None and func.__doc__ or ""
    if docstring_header is not None:
        docstring_header %= dict(func=func.__name__)

        docstring_header += doc_only

        doc = inject_docstring_text(doc, docstring_header, 1)

    decorated = warned(func)
    decorated.__doc__ = doc
    # allow the warning to be re-emitted programmatically
    decorated._sa_warn = lambda: _warn_with_version(
        message, version, wtype, stacklevel=3
    )
    return decorated
|
||||
1945
lib/sqlalchemy/util/langhelpers.py
Normal file
1945
lib/sqlalchemy/util/langhelpers.py
Normal file
File diff suppressed because it is too large
Load Diff
291
lib/sqlalchemy/util/queue.py
Normal file
291
lib/sqlalchemy/util/queue.py
Normal file
@@ -0,0 +1,291 @@
|
||||
# util/queue.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""An adaptation of Py2.3/2.4's Queue module which supports reentrant
|
||||
behavior, using RLock instead of Lock for its mutex object. The
|
||||
Queue object is used exclusively by the sqlalchemy.pool.QueuePool
|
||||
class.
|
||||
|
||||
This is to support the connection pool's usage of weakref callbacks to return
|
||||
connections to the underlying Queue, which can in extremely
|
||||
rare cases be invoked within the ``get()`` method of the Queue itself,
|
||||
producing a ``put()`` inside the ``get()`` and therefore a reentrant
|
||||
condition.
|
||||
|
||||
"""
|
||||
|
||||
from collections import deque
|
||||
from time import time as _time
|
||||
|
||||
from . import compat
|
||||
from .compat import threading
|
||||
from .concurrency import asyncio
|
||||
from .concurrency import await_fallback
|
||||
from .concurrency import await_only
|
||||
from .langhelpers import memoized_property
|
||||
|
||||
|
||||
__all__ = ["Empty", "Full", "Queue"]
|
||||
|
||||
|
||||
class Empty(Exception):
    """Exception raised by Queue.get(block=0)/get_nowait()."""
|
||||
|
||||
|
||||
class Full(Exception):
    """Exception raised by Queue.put(block=0)/put_nowait()."""
|
||||
|
||||
|
||||
class Queue:
    def __init__(self, maxsize=0, use_lifo=False):
        """Initialize a queue object with a given maximum size.

        If `maxsize` is <= 0, the queue size is infinite.

        If `use_lifo` is True, this Queue acts like a Stack (LIFO).
        """

        self._init(maxsize)
        # mutex must be held whenever the queue is mutating.  All methods
        # that acquire mutex must release it before returning.  mutex
        # is shared between the two conditions, so acquiring and
        # releasing the conditions also acquires and releases mutex.
        # An RLock (not Lock) so that weakref-callback re-entry into
        # put() from within get() does not deadlock.
        self.mutex = threading.RLock()
        # Notify not_empty whenever an item is added to the queue; a
        # thread waiting to get is notified then.
        self.not_empty = threading.Condition(self.mutex)
        # Notify not_full whenever an item is removed from the queue;
        # a thread waiting to put is notified then.
        self.not_full = threading.Condition(self.mutex)
        # If this queue uses LIFO or FIFO
        self.use_lifo = use_lifo

    def qsize(self):
        """Return the approximate size of the queue (not reliable!)."""

        with self.mutex:
            return self._qsize()

    def empty(self):
        """Return True if the queue is empty, False otherwise (not
        reliable!)."""

        with self.mutex:
            return self._empty()

    def full(self):
        """Return True if the queue is full, False otherwise (not
        reliable!)."""

        with self.mutex:
            return self._full()

    def put(self, item, block=True, timeout=None):
        """Put an item into the queue.

        If optional args `block` is True and `timeout` is None (the
        default), block if necessary until a free slot is
        available.  If `timeout` is a positive number, it blocks at
        most `timeout` seconds and raises the ``Full`` exception if no
        free slot was available within that time.  Otherwise (`block`
        is false), put an item on the queue if a free slot is
        immediately available, else raise the ``Full`` exception
        (`timeout` is ignored in that case).
        """

        with self.not_full:
            if not block:
                if self._full():
                    raise Full
            elif timeout is None:
                # wait indefinitely for a free slot
                while self._full():
                    self.not_full.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                deadline = _time() + timeout
                while self._full():
                    remaining = deadline - _time()
                    if remaining <= 0.0:
                        raise Full
                    self.not_full.wait(remaining)
            self._put(item)
            self.not_empty.notify()

    def put_nowait(self, item):
        """Put an item into the queue without blocking.

        Only enqueue the item if a free slot is immediately available.
        Otherwise raise the ``Full`` exception.
        """
        return self.put(item, False)

    def get(self, block=True, timeout=None):
        """Remove and return an item from the queue.

        If optional args `block` is True and `timeout` is None (the
        default), block if necessary until an item is available.  If
        `timeout` is a positive number, it blocks at most `timeout`
        seconds and raises the ``Empty`` exception if no item was
        available within that time.  Otherwise (`block` is false),
        return an item if one is immediately available, else raise the
        ``Empty`` exception (`timeout` is ignored in that case).

        """
        with self.not_empty:
            if not block:
                if self._empty():
                    raise Empty
            elif timeout is None:
                # wait indefinitely for an item to arrive
                while self._empty():
                    self.not_empty.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                deadline = _time() + timeout
                while self._empty():
                    remaining = deadline - _time()
                    if remaining <= 0.0:
                        raise Empty
                    self.not_empty.wait(remaining)
            item = self._get()
            self.not_full.notify()
            return item

    def get_nowait(self):
        """Remove and return an item from the queue without blocking.

        Only get an item if one is immediately available.  Otherwise
        raise the ``Empty`` exception.
        """

        return self.get(False)

    # Override these methods to implement other queue organizations
    # (e.g. stack or priority queue).
    # These will only be called with appropriate locks held

    # Initialize the queue representation
    def _init(self, maxsize):
        self.maxsize = maxsize
        self.queue = deque()

    def _qsize(self):
        return len(self.queue)

    # Check whether the queue is empty
    def _empty(self):
        return not self.queue

    # Check whether the queue is full
    def _full(self):
        return self.maxsize > 0 and len(self.queue) == self.maxsize

    # Put a new item in the queue
    def _put(self, item):
        self.queue.append(item)

    # Get an item from the queue
    def _get(self):
        if self.use_lifo:
            # LIFO
            return self.queue.pop()
        else:
            # FIFO
            return self.queue.popleft()
|
||||
|
||||
|
||||
class AsyncAdaptedQueue:
    """Queue-compatible facade over an asyncio queue, driven through
    the greenlet-based await_only() bridge."""

    await_ = staticmethod(await_only)

    def __init__(self, maxsize=0, use_lifo=False):
        self.use_lifo = use_lifo
        self.maxsize = maxsize

    def empty(self):
        return self._queue.empty()

    def full(self):
        return self._queue.full()

    def qsize(self):
        return self._queue.qsize()

    @memoized_property
    def _queue(self):
        # Delay creation of the queue until it is first used, to avoid
        # binding it to a possibly wrong event loop.
        # By delaying the creation of the pool we accommodate the common
        # usage pattern of instantiating the engine at module level, where a
        # different event loop is in present compared to when the application
        # is actually run.

        if self.use_lifo:
            return asyncio.LifoQueue(maxsize=self.maxsize)
        return asyncio.Queue(maxsize=self.maxsize)

    def put_nowait(self, item):
        try:
            return self._queue.put_nowait(item)
        except asyncio.QueueFull as err:
            # translate into the sync-queue exception callers expect
            compat.raise_(
                Full(),
                replace_context=err,
            )

    def put(self, item, block=True, timeout=None):
        if not block:
            return self.put_nowait(item)

        try:
            if timeout is not None:
                return self.await_(
                    asyncio.wait_for(self._queue.put(item), timeout)
                )
            return self.await_(self._queue.put(item))
        except (asyncio.QueueFull, asyncio.TimeoutError) as err:
            compat.raise_(
                Full(),
                replace_context=err,
            )

    def get_nowait(self):
        try:
            return self._queue.get_nowait()
        except asyncio.QueueEmpty as err:
            compat.raise_(
                Empty(),
                replace_context=err,
            )

    def get(self, block=True, timeout=None):
        if not block:
            return self.get_nowait()

        try:
            if timeout is not None:
                return self.await_(
                    asyncio.wait_for(self._queue.get(), timeout)
                )
            return self.await_(self._queue.get())
        except (asyncio.QueueEmpty, asyncio.TimeoutError) as err:
            compat.raise_(
                Empty(),
                replace_context=err,
            )
|
||||
|
||||
|
||||
class FallbackAsyncAdaptedQueue(AsyncAdaptedQueue):
    """AsyncAdaptedQueue variant that uses await_fallback() rather than
    await_only() to drive its coroutines."""

    await_ = staticmethod(await_fallback)
|
||||
100
lib/sqlalchemy/util/topological.py
Normal file
100
lib/sqlalchemy/util/topological.py
Normal file
@@ -0,0 +1,100 @@
|
||||
# util/topological.py
|
||||
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Topological sorting algorithms."""
|
||||
|
||||
from .. import util
|
||||
from ..exc import CircularDependencyError
|
||||
|
||||
__all__ = ["sort", "sort_as_subsets", "find_cycles"]
|
||||
|
||||
|
||||
def sort_as_subsets(tuples, allitems):
    """Yield the members of ``allitems`` in dependency-ordered groups.

    ``tuples`` is a sequence of (parent, child) pairs; each yielded group
    contains only nodes whose remaining parents have all been emitted.
    """

    # map each node to the set of nodes it depends on
    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)

    todo = list(allitems)
    todo_set = set(allitems)

    while todo_set:
        # nodes with no unemitted dependencies, in original order
        ready = [node for node in todo if todo_set.isdisjoint(edges[node])]

        if not ready:
            # no progress possible: a cycle exists among the remainder
            raise CircularDependencyError(
                "Circular dependency detected.",
                find_cycles(tuples, allitems),
                _gen_edges(edges),
            )

        todo_set.difference_update(ready)
        todo = [t for t in todo if t in todo_set]
        yield ready
|
||||
|
||||
|
||||
def sort(tuples, allitems, deterministic_order=True):
    """sort the given list of items by dependency.

    'tuples' is a list of tuples representing a partial ordering.

    deterministic_order is no longer used, the order is now always
    deterministic given the order of "allitems".  the flag is there
    for backwards compatibility with Alembic.

    """

    # flatten the dependency-ordered groups into a single stream
    for group in sort_as_subsets(tuples, allitems):
        for member in group:
            yield member
|
||||
|
||||
|
||||
def find_cycles(tuples, allitems):
    """Return the set of all nodes that participate in any cycle.

    adapted from:
    https://neopythonic.blogspot.com/2009/01/detecting-cycles-in-directed-graph.html
    """

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[parent].add(child)
    nodes_to_test = set(edges)

    output = set()

    # we'd like to find all nodes that are involved in cycles, so we do
    # the full DFS pass for each candidate start node.
    #
    # we can go just through parent edge nodes: if a node is only a
    # child and never a parent, by definition it can't be part of a
    # cycle.  same if it's not in the edges at all.
    for start in nodes_to_test:
        stack = [start]
        todo = nodes_to_test.difference(stack)
        while stack:
            top = stack[-1]
            for successor in edges[top]:
                if successor in stack:
                    # back-edge found: everything from the first
                    # occurrence of ``successor`` on the stack onward
                    # forms a cycle
                    cyc = stack[stack.index(successor) :]
                    todo.difference_update(cyc)
                    output.update(cyc)

                if successor in todo:
                    # descend into an unvisited node
                    stack.append(successor)
                    todo.remove(successor)
                    break
            else:
                # all successors exhausted; backtrack
                stack.pop()
    return output
|
||||
|
||||
|
||||
def _gen_edges(edges):
|
||||
return set([(right, left) for left in edges for right in edges[left]])
|
||||
Reference in New Issue
Block a user