chore: 添加虚拟环境到仓库

- 添加 backend_service/venv 虚拟环境
- 包含所有Python依赖包
- 注意:虚拟环境约393MB,包含12655个文件
This commit is contained in:
2025-12-03 10:19:25 +08:00
parent a6c2027caa
commit c4f851d387
12655 changed files with 3009376 additions and 0 deletions

View File

@@ -0,0 +1,30 @@
# coding:utf-8
"""
Function decoration for backoff and retry
This module provides function decorators which can be used to wrap a
function such that it will be retried until some condition is met. It
is meant to be of use when accessing unreliable resources with the
potential for intermittent failures i.e. network resources and external
APIs. Somewhat more generally, it may also be of use for dynamically
polling resources for externally generated content.
For examples and full documentation see the README at
https://github.com/litl/backoff
"""
from backoff._decorator import on_exception, on_predicate
from backoff._jitter import full_jitter, random_jitter
from backoff._wait_gen import constant, expo, fibo, runtime
# Names re-exported as the package's public API.
__all__ = [
    'on_predicate',
    'on_exception',
    'constant',
    'expo',
    'fibo',
    'runtime',
    'full_jitter',
    'random_jitter',
]

__version__ = "2.2.1"

View File

@@ -0,0 +1,188 @@
# coding:utf-8
import datetime
import functools
import asyncio
from datetime import timedelta
from backoff._common import (_init_wait_gen, _maybe_call, _next_wait)
def _ensure_coroutine(coro_or_func):
if asyncio.iscoroutinefunction(coro_or_func):
return coro_or_func
else:
@functools.wraps(coro_or_func)
async def f(*args, **kwargs):
return coro_or_func(*args, **kwargs)
return f
def _ensure_coroutines(coros_or_funcs):
    """Normalize every handler in *coros_or_funcs* to a coroutine function."""
    return list(map(_ensure_coroutine, coros_or_funcs))
async def _call_handlers(handlers,
*,
target, args, kwargs, tries, elapsed,
**extra):
details = {
'target': target,
'args': args,
'kwargs': kwargs,
'tries': tries,
'elapsed': elapsed,
}
details.update(extra)
for handler in handlers:
await handler(details)
def retry_predicate(target, wait_gen, predicate,
                    *,
                    max_tries, max_time, jitter,
                    on_success, on_backoff, on_giveup,
                    wait_gen_kwargs):
    """Wrap coroutine *target* so it is re-awaited while ``predicate(ret)``
    is truthy.

    Retrying stops when *max_tries* or *max_time* is exhausted or when the
    wait generator raises StopIteration; the last result is returned in
    every case.
    """
    # Handlers may be plain callables; normalize them all to coroutines.
    on_success = _ensure_coroutines(on_success)
    on_backoff = _ensure_coroutines(on_backoff)
    on_giveup = _ensure_coroutines(on_giveup)

    # Easy to implement, please report if you need this.
    assert not asyncio.iscoroutinefunction(max_tries)
    assert not asyncio.iscoroutinefunction(jitter)

    assert asyncio.iscoroutinefunction(target)

    @functools.wraps(target)
    async def retry(*args, **kwargs):

        # update variables from outer function args
        # (callables are re-evaluated on every invocation of the wrapper)
        max_tries_value = _maybe_call(max_tries)
        max_time_value = _maybe_call(max_time)

        tries = 0
        start = datetime.datetime.now()
        wait = _init_wait_gen(wait_gen, wait_gen_kwargs)
        while True:
            tries += 1
            elapsed = timedelta.total_seconds(datetime.datetime.now() - start)
            details = {
                "target": target,
                "args": args,
                "kwargs": kwargs,
                "tries": tries,
                "elapsed": elapsed,
            }

            ret = await target(*args, **kwargs)
            if predicate(ret):
                # Result unacceptable: give up if a limit is hit,
                # otherwise back off and retry.
                max_tries_exceeded = (tries == max_tries_value)
                max_time_exceeded = (max_time_value is not None and
                                     elapsed >= max_time_value)

                if max_tries_exceeded or max_time_exceeded:
                    await _call_handlers(on_giveup, **details, value=ret)
                    break

                try:
                    seconds = _next_wait(wait, ret, jitter, elapsed,
                                         max_time_value)
                except StopIteration:
                    # Wait generator exhausted: give up with the last result.
                    await _call_handlers(on_giveup, **details, value=ret)
                    break

                await _call_handlers(on_backoff, **details, value=ret,
                                     wait=seconds)

                # Note: there is no convenient way to pass explicit event
                # loop to decorator, so here we assume that either default
                # thread event loop is set and correct (it mostly is
                # by default), or Python >= 3.5.3 or Python >= 3.6 is used
                # where loop.get_event_loop() in coroutine guaranteed to
                # return correct value.
                # See for details:
                # <https://groups.google.com/forum/#!topic/python-tulip/yF9C-rFpiKk>
                # <https://bugs.python.org/issue28613>
                await asyncio.sleep(seconds)
                continue
            else:
                await _call_handlers(on_success, **details, value=ret)
                break

        return ret

    return retry
def retry_exception(target, wait_gen, exception,
                    *,
                    max_tries, max_time, jitter, giveup,
                    on_success, on_backoff, on_giveup, raise_on_giveup,
                    wait_gen_kwargs):
    """Wrap coroutine *target* so it is retried when it raises *exception*.

    Retrying stops when ``giveup(e)`` is truthy or *max_tries*/*max_time*
    is exhausted; the exception is then re-raised unless
    *raise_on_giveup* is false, in which case None is returned.
    """
    # Handlers and the giveup predicate may be sync; normalize to coroutines.
    on_success = _ensure_coroutines(on_success)
    on_backoff = _ensure_coroutines(on_backoff)
    on_giveup = _ensure_coroutines(on_giveup)
    giveup = _ensure_coroutine(giveup)

    # Easy to implement, please report if you need this.
    assert not asyncio.iscoroutinefunction(max_tries)
    assert not asyncio.iscoroutinefunction(jitter)

    @functools.wraps(target)
    async def retry(*args, **kwargs):
        # Callables for limits are re-evaluated on every wrapper call.
        max_tries_value = _maybe_call(max_tries)
        max_time_value = _maybe_call(max_time)

        tries = 0
        start = datetime.datetime.now()
        wait = _init_wait_gen(wait_gen, wait_gen_kwargs)
        while True:
            tries += 1
            elapsed = timedelta.total_seconds(datetime.datetime.now() - start)
            details = {
                "target": target,
                "args": args,
                "kwargs": kwargs,
                "tries": tries,
                "elapsed": elapsed,
            }

            try:
                ret = await target(*args, **kwargs)
            except exception as e:
                giveup_result = await giveup(e)
                max_tries_exceeded = (tries == max_tries_value)
                max_time_exceeded = (max_time_value is not None and
                                     elapsed >= max_time_value)

                if giveup_result or max_tries_exceeded or max_time_exceeded:
                    await _call_handlers(on_giveup, **details, exception=e)
                    if raise_on_giveup:
                        raise
                    return None

                try:
                    seconds = _next_wait(wait, e, jitter, elapsed,
                                         max_time_value)
                except StopIteration:
                    # Wait generator exhausted: always re-raise here.
                    await _call_handlers(on_giveup, **details, exception=e)
                    raise e

                await _call_handlers(on_backoff, **details, wait=seconds,
                                     exception=e)

                # Note: there is no convenient way to pass explicit event
                # loop to decorator, so here we assume that either default
                # thread event loop is set and correct (it mostly is
                # by default), or Python >= 3.5.3 or Python >= 3.6 is used
                # where loop.get_event_loop() in coroutine guaranteed to
                # return correct value.
                # See for details:
                # <https://groups.google.com/forum/#!topic/python-tulip/yF9C-rFpiKk>
                # <https://bugs.python.org/issue28613>
                await asyncio.sleep(seconds)
            else:
                await _call_handlers(on_success, **details)
                return ret

    return retry

View File

@@ -0,0 +1,120 @@
# coding:utf-8
import functools
import logging
import sys
import traceback
import warnings
# Use module-specific logger with a default null handler, so the library
# emits nothing unless the application configures a real handler.
_logger = logging.getLogger('backoff')
_logger.addHandler(logging.NullHandler())  # pragma: no cover
_logger.setLevel(logging.INFO)
# Evaluate arg that can be either a fixed value or a callable.
def _maybe_call(f, *args, **kwargs):
if callable(f):
try:
return f(*args, **kwargs)
except TypeError:
return f
else:
return f
def _init_wait_gen(wait_gen, wait_gen_kwargs):
    """Instantiate *wait_gen* with resolved kwargs and prime it.

    Callable kwarg values are evaluated first; the generator is advanced
    to its first yield so subsequent ``send()`` calls deliver values.
    """
    resolved = {name: _maybe_call(val) for name, val in wait_gen_kwargs.items()}
    gen = wait_gen(**resolved)
    gen.send(None)  # prime the generator past its initial bare yield
    return gen
def _next_wait(wait, send_value, jitter, elapsed, max_time):
value = wait.send(send_value)
try:
if jitter is not None:
seconds = jitter(value)
else:
seconds = value
except TypeError:
warnings.warn(
"Nullary jitter function signature is deprecated. Use "
"unary signature accepting a wait value in seconds and "
"returning a jittered version of it.",
DeprecationWarning,
stacklevel=2,
)
seconds = value + jitter()
# don't sleep longer than remaining allotted max_time
if max_time is not None:
seconds = min(seconds, max_time - elapsed)
return seconds
def _prepare_logger(logger):
if isinstance(logger, str):
logger = logging.getLogger(logger)
return logger
# Configure handler list with user specified handler and optionally
# with a default handler bound to the specified logger.
def _config_handlers(
user_handlers, *, default_handler=None, logger=None, log_level=None
):
handlers = []
if logger is not None:
assert log_level is not None, "Log level is not specified"
# bind the specified logger to the default log handler
log_handler = functools.partial(
default_handler, logger=logger, log_level=log_level
)
handlers.append(log_handler)
if user_handlers is None:
return handlers
# user specified handlers can either be an iterable of handlers
# or a single handler. either way append them to the list.
if hasattr(user_handlers, '__iter__'):
# add all handlers in the iterable
handlers += list(user_handlers)
else:
# append a single handler
handlers.append(user_handlers)
return handlers
# Default backoff handler
def _log_backoff(details, logger, log_level):
msg = "Backing off %s(...) for %.1fs (%s)"
log_args = [details['target'].__name__, details['wait']]
exc_typ, exc, _ = sys.exc_info()
if exc is not None:
exc_fmt = traceback.format_exception_only(exc_typ, exc)[-1]
log_args.append(exc_fmt.rstrip("\n"))
else:
log_args.append(details['value'])
logger.log(log_level, msg, *log_args)
# Default giveup handler
def _log_giveup(details, logger, log_level):
msg = "Giving up %s(...) after %d tries (%s)"
log_args = [details['target'].__name__, details['tries']]
exc_typ, exc, _ = sys.exc_info()
if exc is not None:
exc_fmt = traceback.format_exception_only(exc_typ, exc)[-1]
log_args.append(exc_fmt.rstrip("\n"))
else:
log_args.append(details['value'])
logger.log(log_level, msg, *log_args)

View File

@@ -0,0 +1,222 @@
# coding:utf-8
import asyncio
import logging
import operator
from typing import Any, Callable, Iterable, Optional, Type, Union
from backoff._common import (
_prepare_logger,
_config_handlers,
_log_backoff,
_log_giveup
)
from backoff._jitter import full_jitter
from backoff import _async, _sync
from backoff._typing import (
_CallableT,
_Handler,
_Jitterer,
_MaybeCallable,
_MaybeLogger,
_MaybeSequence,
_Predicate,
_WaitGenerator,
)
def on_predicate(wait_gen: _WaitGenerator,
                 predicate: _Predicate[Any] = operator.not_,
                 *,
                 max_tries: Optional[_MaybeCallable[int]] = None,
                 max_time: Optional[_MaybeCallable[float]] = None,
                 jitter: Union[_Jitterer, None] = full_jitter,
                 on_success: Union[_Handler, Iterable[_Handler], None] = None,
                 on_backoff: Union[_Handler, Iterable[_Handler], None] = None,
                 on_giveup: Union[_Handler, Iterable[_Handler], None] = None,
                 logger: _MaybeLogger = 'backoff',
                 backoff_log_level: int = logging.INFO,
                 giveup_log_level: int = logging.ERROR,
                 **wait_gen_kwargs: Any) -> Callable[[_CallableT], _CallableT]:
    """Returns decorator for backoff and retry triggered by predicate.

    Args:
        wait_gen: A generator yielding successive wait times in
            seconds.
        predicate: A function which when called on the return value of
            the target function will trigger backoff when considered
            truthily. If not specified, the default behavior is to
            backoff on falsey return values.
        max_tries: The maximum number of attempts to make before giving
            up. In the case of failure, the result of the last attempt
            will be returned. The default value of None means there
            is no limit to the number of tries. If a callable is passed,
            it will be evaluated at runtime and its return value used.
        max_time: The maximum total amount of time to try for before
            giving up. If this time expires, the result of the last
            attempt will be returned. If a callable is passed, it will
            be evaluated at runtime and its return value used.
        jitter: A function of the value yielded by wait_gen returning
            the actual time to wait. This distributes wait times
            stochastically in order to avoid timing collisions across
            concurrent clients. Wait times are jittered by default
            using the full_jitter function. Jittering may be disabled
            altogether by passing jitter=None.
        on_success: Callable (or iterable of callables) with a unary
            signature to be called in the event of success. The
            parameter is a dict containing details about the invocation.
        on_backoff: Callable (or iterable of callables) with a unary
            signature to be called in the event of a backoff. The
            parameter is a dict containing details about the invocation.
        on_giveup: Callable (or iterable of callables) with a unary
            signature to be called in the event that max_tries
            is exceeded. The parameter is a dict containing details
            about the invocation.
        logger: Name of logger or Logger object to log to. Defaults to
            'backoff'.
        backoff_log_level: log level for the backoff event. Defaults to "INFO"
        giveup_log_level: log level for the give up event. Defaults to "ERROR"
        **wait_gen_kwargs: Any additional keyword args specified will be
            passed to wait_gen when it is initialized. Any callable
            args will first be evaluated and their return values passed.
            This is useful for runtime configuration.
    """
    def decorate(target):
        # NOTE(review): `nonlocal` rebinds the outer arguments in place,
        # so the normalization below mutates the closure shared by all
        # uses of this decorator instance — presumably intended for a
        # single application per on_predicate(...) call; verify before
        # reusing one decorator object on several targets.
        nonlocal logger, on_success, on_backoff, on_giveup

        logger = _prepare_logger(logger)
        on_success = _config_handlers(on_success)
        on_backoff = _config_handlers(
            on_backoff,
            default_handler=_log_backoff,
            logger=logger,
            log_level=backoff_log_level
        )
        on_giveup = _config_handlers(
            on_giveup,
            default_handler=_log_giveup,
            logger=logger,
            log_level=giveup_log_level
        )

        # Dispatch to the async or sync retry loop based on the target.
        if asyncio.iscoroutinefunction(target):
            retry = _async.retry_predicate
        else:
            retry = _sync.retry_predicate

        return retry(
            target,
            wait_gen,
            predicate,
            max_tries=max_tries,
            max_time=max_time,
            jitter=jitter,
            on_success=on_success,
            on_backoff=on_backoff,
            on_giveup=on_giveup,
            wait_gen_kwargs=wait_gen_kwargs
        )

    # Return a function which decorates a target with a retry loop.
    return decorate
def on_exception(wait_gen: _WaitGenerator,
                 exception: _MaybeSequence[Type[Exception]],
                 *,
                 max_tries: Optional[_MaybeCallable[int]] = None,
                 max_time: Optional[_MaybeCallable[float]] = None,
                 jitter: Union[_Jitterer, None] = full_jitter,
                 giveup: _Predicate[Exception] = lambda e: False,
                 on_success: Union[_Handler, Iterable[_Handler], None] = None,
                 on_backoff: Union[_Handler, Iterable[_Handler], None] = None,
                 on_giveup: Union[_Handler, Iterable[_Handler], None] = None,
                 raise_on_giveup: bool = True,
                 logger: _MaybeLogger = 'backoff',
                 backoff_log_level: int = logging.INFO,
                 giveup_log_level: int = logging.ERROR,
                 **wait_gen_kwargs: Any) -> Callable[[_CallableT], _CallableT]:
    """Returns decorator for backoff and retry triggered by exception.

    Args:
        wait_gen: A generator yielding successive wait times in
            seconds.
        exception: An exception type (or tuple of types) which triggers
            backoff.
        max_tries: The maximum number of attempts to make before giving
            up. Once exhausted, the exception will be allowed to escape.
            The default value of None means there is no limit to the
            number of tries. If a callable is passed, it will be
            evaluated at runtime and its return value used.
        max_time: The maximum total amount of time to try for before
            giving up. Once expired, the exception will be allowed to
            escape. If a callable is passed, it will be
            evaluated at runtime and its return value used.
        jitter: A function of the value yielded by wait_gen returning
            the actual time to wait. This distributes wait times
            stochastically in order to avoid timing collisions across
            concurrent clients. Wait times are jittered by default
            using the full_jitter function. Jittering may be disabled
            altogether by passing jitter=None.
        giveup: Function accepting an exception instance and
            returning whether or not to give up. Optional. The default
            is to always continue.
        on_success: Callable (or iterable of callables) with a unary
            signature to be called in the event of success. The
            parameter is a dict containing details about the invocation.
        on_backoff: Callable (or iterable of callables) with a unary
            signature to be called in the event of a backoff. The
            parameter is a dict containing details about the invocation.
        on_giveup: Callable (or iterable of callables) with a unary
            signature to be called in the event that max_tries
            is exceeded. The parameter is a dict containing details
            about the invocation.
        raise_on_giveup: Boolean indicating whether the registered exceptions
            should be raised on giveup. Defaults to `True`
        logger: Name or Logger object to log to. Defaults to 'backoff'.
        backoff_log_level: log level for the backoff event. Defaults to "INFO"
        giveup_log_level: log level for the give up event. Defaults to "ERROR"
        **wait_gen_kwargs: Any additional keyword args specified will be
            passed to wait_gen when it is initialized. Any callable
            args will first be evaluated and their return values passed.
            This is useful for runtime configuration.
    """
    def decorate(target):
        # NOTE(review): `nonlocal` rebinds the outer arguments in place,
        # so the normalization below mutates the closure shared by all
        # uses of this decorator instance — presumably intended for a
        # single application per on_exception(...) call; verify before
        # reusing one decorator object on several targets.
        nonlocal logger, on_success, on_backoff, on_giveup

        logger = _prepare_logger(logger)
        on_success = _config_handlers(on_success)
        on_backoff = _config_handlers(
            on_backoff,
            default_handler=_log_backoff,
            logger=logger,
            log_level=backoff_log_level,
        )
        on_giveup = _config_handlers(
            on_giveup,
            default_handler=_log_giveup,
            logger=logger,
            log_level=giveup_log_level,
        )

        # Dispatch to the async or sync retry loop based on the target.
        if asyncio.iscoroutinefunction(target):
            retry = _async.retry_exception
        else:
            retry = _sync.retry_exception

        return retry(
            target,
            wait_gen,
            exception,
            max_tries=max_tries,
            max_time=max_time,
            jitter=jitter,
            giveup=giveup,
            on_success=on_success,
            on_backoff=on_backoff,
            on_giveup=on_giveup,
            raise_on_giveup=raise_on_giveup,
            wait_gen_kwargs=wait_gen_kwargs
        )

    # Return a function which decorates a target with a retry loop.
    return decorate

View File

@@ -0,0 +1,28 @@
# coding:utf-8
import random
def random_jitter(value: float) -> float:
    """Add up to one second of random jitter to *value*.

    Prior to backoff version 1.2 this was the default jitter behavior.

    Args:
        value: The unadulterated backoff value.
    """
    offset = random.random()  # uniform in [0, 1)
    return value + offset
def full_jitter(value: float) -> float:
    """Draw a uniformly random wait across the full range [0, value].

    This corresponds to the "Full Jitter" algorithm specified in the
    AWS blog's post on the performance of various jitter algorithms.
    (http://www.awsarchitectureblog.com/2015/03/backoff.html)

    Args:
        value: The unadulterated backoff value.
    """
    jittered = random.uniform(0, value)
    return jittered

View File

@@ -0,0 +1,132 @@
# coding:utf-8
import datetime
import functools
import time
from datetime import timedelta
from backoff._common import (_init_wait_gen, _maybe_call, _next_wait)
def _call_handlers(hdlrs, target, args, kwargs, tries, elapsed, **extra):
details = {
'target': target,
'args': args,
'kwargs': kwargs,
'tries': tries,
'elapsed': elapsed,
}
details.update(extra)
for hdlr in hdlrs:
hdlr(details)
def retry_predicate(target, wait_gen, predicate,
                    *,
                    max_tries, max_time, jitter,
                    on_success, on_backoff, on_giveup,
                    wait_gen_kwargs):
    """Wrap *target* so it is re-invoked while ``predicate(ret)`` is truthy.

    Retrying stops when *max_tries* or *max_time* is exhausted or when
    the wait generator raises StopIteration; the last result is returned
    in every case.

    Args:
        target: function whose return value is tested with *predicate*.
        wait_gen: generator factory yielding successive wait times.
        predicate: truthy result triggers backoff and retry.
        max_tries / max_time: retry limits; values or callables
            evaluated on every invocation of the wrapper.
        jitter: optional transform applied to each wait value.
        on_success / on_backoff / on_giveup: handler lists receiving a
            details dict per event.
        wait_gen_kwargs: kwargs for wait_gen (callables evaluated at
            generator init).
    """
    @functools.wraps(target)
    def retry(*args, **kwargs):
        # Limits may be callables; re-evaluate them on each call.
        max_tries_value = _maybe_call(max_tries)
        max_time_value = _maybe_call(max_time)

        tries = 0
        start = datetime.datetime.now()
        wait = _init_wait_gen(wait_gen, wait_gen_kwargs)
        while True:
            tries += 1
            elapsed = timedelta.total_seconds(datetime.datetime.now() - start)
            details = {
                "target": target,
                "args": args,
                "kwargs": kwargs,
                "tries": tries,
                "elapsed": elapsed,
            }

            ret = target(*args, **kwargs)
            if predicate(ret):
                # Result unacceptable: give up if a limit is hit,
                # otherwise back off and retry.
                max_tries_exceeded = (tries == max_tries_value)
                max_time_exceeded = (max_time_value is not None and
                                     elapsed >= max_time_value)

                if max_tries_exceeded or max_time_exceeded:
                    _call_handlers(on_giveup, **details, value=ret)
                    break

                try:
                    seconds = _next_wait(wait, ret, jitter, elapsed,
                                         max_time_value)
                except StopIteration:
                    # FIX: pass value=ret, matching the async
                    # implementation. In the predicate path there is no
                    # active exception, so the default _log_giveup
                    # handler reads details['value'] and would raise
                    # KeyError without it.
                    _call_handlers(on_giveup, **details, value=ret)
                    break

                _call_handlers(on_backoff, **details,
                               value=ret, wait=seconds)

                time.sleep(seconds)
                continue
            else:
                _call_handlers(on_success, **details, value=ret)
                break

        return ret

    return retry
def retry_exception(target, wait_gen, exception,
                    *,
                    max_tries, max_time, jitter, giveup,
                    on_success, on_backoff, on_giveup, raise_on_giveup,
                    wait_gen_kwargs):
    """Wrap *target* so it is retried when it raises *exception*.

    Retrying stops when ``giveup(e)`` is truthy or *max_tries*/*max_time*
    is exhausted; the exception is then re-raised unless
    *raise_on_giveup* is false, in which case None is returned.
    """
    @functools.wraps(target)
    def retry(*args, **kwargs):
        # Limits may be callables; re-evaluate them on each call.
        max_tries_value = _maybe_call(max_tries)
        max_time_value = _maybe_call(max_time)

        tries = 0
        start = datetime.datetime.now()
        wait = _init_wait_gen(wait_gen, wait_gen_kwargs)
        while True:
            tries += 1
            elapsed = timedelta.total_seconds(datetime.datetime.now() - start)
            details = {
                "target": target,
                "args": args,
                "kwargs": kwargs,
                "tries": tries,
                "elapsed": elapsed,
            }

            try:
                ret = target(*args, **kwargs)
            except exception as e:
                max_tries_exceeded = (tries == max_tries_value)
                max_time_exceeded = (max_time_value is not None and
                                     elapsed >= max_time_value)

                if giveup(e) or max_tries_exceeded or max_time_exceeded:
                    _call_handlers(on_giveup, **details, exception=e)
                    if raise_on_giveup:
                        raise
                    return None

                try:
                    seconds = _next_wait(wait, e, jitter, elapsed,
                                         max_time_value)
                except StopIteration:
                    # Wait generator exhausted: always re-raise here.
                    _call_handlers(on_giveup, **details, exception=e)
                    raise e

                _call_handlers(on_backoff, **details, wait=seconds,
                               exception=e)

                time.sleep(seconds)
            else:
                _call_handlers(on_success, **details)
                return ret

    return retry

View File

@@ -0,0 +1,44 @@
# coding:utf-8
import logging
import sys
from typing import (Any, Callable, Coroutine, Dict, Generator, Sequence, Tuple,
TypeVar, Union)
# ``typing.TypedDict`` only exists from Python 3.8 onward.
if sys.version_info >= (3, 8):  # pragma: no cover
    from typing import TypedDict
else:  # pragma: no cover
    # use typing_extensions if installed but don't require it
    try:
        from typing_extensions import TypedDict
    except ImportError:
        # Minimal runtime stand-in: a dict subclass that tolerates
        # subclassing with class keyword args (e.g. ``total=False``).
        class TypedDict(dict):
            def __init_subclass__(cls, **kwargs: Any) -> None:
                return super().__init_subclass__()
class _Details(TypedDict):
    """Keys present in every handler details dict."""
    target: Callable[..., Any]  # the wrapped function being retried
    args: Tuple[Any, ...]       # positional args of the current call
    kwargs: Dict[str, Any]      # keyword args of the current call
    tries: int                  # 1-based attempt counter
    elapsed: float              # seconds elapsed since the first attempt
class Details(_Details, total=False):
    """Details dict including keys that appear only for some events."""
    wait: float  # present in the on_backoff handler case for either decorator
    value: Any  # present in the on_predicate decorator case
# Generic type variable used by the parameterized aliases below.
T = TypeVar("T")

# The decorated callable; decorators return the same callable type.
_CallableT = TypeVar('_CallableT', bound=Callable[..., Any])

# Event handler: sync or async callable receiving a Details dict.
_Handler = Union[
    Callable[[Details], None],
    Callable[[Details], Coroutine[Any, Any, None]],
]

# Maps a raw wait value (seconds) to a jittered one.
_Jitterer = Callable[[float], float]

# Either a value of type T or a nullary callable producing one.
_MaybeCallable = Union[T, Callable[[], T]]

# Logger name, Logger instance, or None.
_MaybeLogger = Union[str, logging.Logger, None]

# A single T or a sequence of them.
_MaybeSequence = Union[T, Sequence[T]]

# Predicate over values of type T.
_Predicate = Callable[[T], bool]

# Factory producing a wait-time generator (e.g. expo, fibo, constant).
_WaitGenerator = Callable[..., Generator[float, None, None]]

View File

@@ -0,0 +1,89 @@
# coding:utf-8
import itertools
from typing import Any, Callable, Generator, Iterable, Optional, Union
def expo(
    base: float = 2,
    factor: float = 1,
    max_value: Optional[float] = None
) -> Generator[float, Any, None]:
    """Generator for exponential decay.

    Yields ``factor * base ** n`` for n = 0, 1, 2, ...; once the true
    exponential value reaches *max_value*, *max_value* is yielded
    forever instead.

    Args:
        base: The mathematical base of the exponentiation operation
        factor: Factor to multiply the exponentiation by.
        max_value: The maximum value to yield. Once the value in the
            true exponential sequence exceeds this, the value
            of max_value will forever after be yielded.
    """
    # First bare yield absorbs the priming .send(None) call.
    yield  # type: ignore[misc]
    exponent = 0
    while True:
        candidate = factor * base ** exponent
        if max_value is not None and candidate >= max_value:
            # Ceiling reached: do not advance the exponent any further.
            yield max_value
        else:
            yield candidate
            exponent += 1
def fibo(max_value: Optional[int] = None) -> Generator[int, None, None]:
    """Generator for fibonaccial decay.

    Args:
        max_value: The maximum value to yield. Once the value in the
            true fibonacci sequence exceeds this, the value
            of max_value will forever after be yielded.
    """
    # First bare yield absorbs the priming .send(None) call.
    yield  # type: ignore[misc]
    current, following = 1, 1
    while True:
        if max_value is not None and current >= max_value:
            # Ceiling reached: freeze the sequence at max_value.
            yield max_value
        else:
            yield current
            current, following = following, current + following
def constant(
    interval: Union[int, Iterable[float]] = 1
) -> Generator[float, None, None]:
    """Generator for constant intervals.

    Args:
        interval: A constant value to yield or an iterable of such values.
    """
    # First bare yield absorbs the priming .send(None) call.
    yield  # type: ignore[misc]
    try:
        intervals = iter(interval)  # type: ignore
    except TypeError:
        # A scalar interval repeats forever.
        intervals = itertools.repeat(interval)  # type: ignore
    # Explicit loop (not `yield from`): callers drive this generator
    # with .send(), which itertools.repeat does not support.
    for wait_time in intervals:
        yield wait_time
def runtime(
    *,
    value: Callable[[Any], float]
) -> Generator[float, None, None]:
    """Generator deriving each wait from the decorated function's
    last return value or raised exception.

    Args:
        value: a callable which takes as input the decorated
            function's return value or thrown exception and
            determines how long to wait
    """
    # The priming .send(None) resumes here with None.
    last = yield  # type: ignore[misc]
    while True:
        # Each subsequent send() delivers the latest result/exception.
        last = yield value(last)

View File

@@ -0,0 +1,6 @@
# coding:utf-8
from ._typing import Details
# The only public name exported by this helper package.
__all__ = [
    'Details'
]