chore: add virtual environment to the repository

- Add the backend_service/venv virtual environment
- Includes all Python dependency packages
- Note: the virtual environment is approximately 393 MB and contains 12,655 files
openai/_utils/__init__.py
@@ -0,0 +1,67 @@
from ._logs import SensitiveHeadersFilter as SensitiveHeadersFilter
from ._sync import asyncify as asyncify
from ._proxy import LazyProxy as LazyProxy
from ._utils import (
    flatten as flatten,
    is_dict as is_dict,
    is_list as is_list,
    is_given as is_given,
    is_tuple as is_tuple,
    json_safe as json_safe,
    lru_cache as lru_cache,
    is_mapping as is_mapping,
    is_tuple_t as is_tuple_t,
    is_iterable as is_iterable,
    is_sequence as is_sequence,
    coerce_float as coerce_float,
    is_mapping_t as is_mapping_t,
    removeprefix as removeprefix,
    removesuffix as removesuffix,
    extract_files as extract_files,
    is_sequence_t as is_sequence_t,
    required_args as required_args,
    coerce_boolean as coerce_boolean,
    coerce_integer as coerce_integer,
    file_from_path as file_from_path,
    is_azure_client as is_azure_client,
    strip_not_given as strip_not_given,
    deepcopy_minimal as deepcopy_minimal,
    get_async_library as get_async_library,
    maybe_coerce_float as maybe_coerce_float,
    get_required_header as get_required_header,
    maybe_coerce_boolean as maybe_coerce_boolean,
    maybe_coerce_integer as maybe_coerce_integer,
    is_async_azure_client as is_async_azure_client,
)
from ._compat import (
    get_args as get_args,
    is_union as is_union,
    get_origin as get_origin,
    is_typeddict as is_typeddict,
    is_literal_type as is_literal_type,
)
from ._typing import (
    is_list_type as is_list_type,
    is_union_type as is_union_type,
    extract_type_arg as extract_type_arg,
    is_iterable_type as is_iterable_type,
    is_required_type as is_required_type,
    is_sequence_type as is_sequence_type,
    is_annotated_type as is_annotated_type,
    is_type_alias_type as is_type_alias_type,
    strip_annotated_type as strip_annotated_type,
    extract_type_var_from_base as extract_type_var_from_base,
)
from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator
from ._transform import (
    PropertyInfo as PropertyInfo,
    transform as transform,
    async_transform as async_transform,
    maybe_transform as maybe_transform,
    async_maybe_transform as async_maybe_transform,
)
from ._reflection import (
    function_has_argument as function_has_argument,
    assert_signatures_in_sync as assert_signatures_in_sync,
)
from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
openai/_utils/_compat.py
@@ -0,0 +1,45 @@
from __future__ import annotations

import sys
import typing_extensions
from typing import Any, Type, Union, Literal, Optional
from datetime import date, datetime
from typing_extensions import get_args as _get_args, get_origin as _get_origin

from .._types import StrBytesIntFloat
from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime

_LITERAL_TYPES = {Literal, typing_extensions.Literal}


def get_args(tp: type[Any]) -> tuple[Any, ...]:
    return _get_args(tp)


def get_origin(tp: type[Any]) -> type[Any] | None:
    return _get_origin(tp)


def is_union(tp: Optional[Type[Any]]) -> bool:
    if sys.version_info < (3, 10):
        return tp is Union  # type: ignore[comparison-overlap]
    else:
        import types

        return tp is Union or tp is types.UnionType


def is_typeddict(tp: Type[Any]) -> bool:
    return typing_extensions.is_typeddict(tp)


def is_literal_type(tp: Type[Any]) -> bool:
    return get_origin(tp) in _LITERAL_TYPES


def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
    return _parse_date(value)


def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
    return _parse_datetime(value)
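A quick sketch of how these compatibility helpers behave; this assumes the vendored `openai` package is importable (the helpers are re-exported from `openai._utils` per the `__init__.py` above):

```python
from typing import Literal, Optional, Union

from openai._utils import get_origin, is_union, is_literal_type

# `Optional[int]` is just `Union[int, None]`, so both report as unions;
# on Python 3.10+ the PEP 604 spelling `int | str` is also recognised
print(is_union(get_origin(Union[int, str])))  # True
print(is_union(get_origin(Optional[int])))    # True
print(is_literal_type(Literal["a", "b"]))     # True
```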
openai/_utils/_datetime_parse.py
@@ -0,0 +1,136 @@
"""
This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py
without the Pydantic v1 specific errors.
"""

from __future__ import annotations

import re
from typing import Dict, Union, Optional
from datetime import date, datetime, timezone, timedelta

from .._types import StrBytesIntFloat

date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
time_expr = (
    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
)

date_re = re.compile(f"{date_expr}$")
datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")


EPOCH = datetime(1970, 1, 1)
# if greater than this, the number is in ms, if less than or equal it's in seconds
# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
MS_WATERSHED = int(2e10)
# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
MAX_NUMBER = int(3e20)


def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
    if isinstance(value, (int, float)):
        return value
    try:
        return float(value)
    except ValueError:
        return None
    except TypeError:
        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None


def _from_unix_seconds(seconds: Union[int, float]) -> datetime:
    if seconds > MAX_NUMBER:
        return datetime.max
    elif seconds < -MAX_NUMBER:
        return datetime.min

    while abs(seconds) > MS_WATERSHED:
        seconds /= 1000
    dt = EPOCH + timedelta(seconds=seconds)
    return dt.replace(tzinfo=timezone.utc)


def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]:
    if value == "Z":
        return timezone.utc
    elif value is not None:
        offset_mins = int(value[-2:]) if len(value) > 3 else 0
        offset = 60 * int(value[1:3]) + offset_mins
        if value[0] == "-":
            offset = -offset
        return timezone(timedelta(minutes=offset))
    else:
        return None


def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
    """
    Parse a datetime/int/float/string and return a datetime.datetime.

    This function supports time zone offsets. When the input contains one,
    the output uses a timezone with a fixed offset from UTC.

    Raise ValueError if the input is well formatted but not a valid datetime.
    Raise ValueError if the input isn't well formatted.
    """
    if isinstance(value, datetime):
        return value

    number = _get_numeric(value, "datetime")
    if number is not None:
        return _from_unix_seconds(number)

    if isinstance(value, bytes):
        value = value.decode()

    assert not isinstance(value, (float, int))

    match = datetime_re.match(value)
    if match is None:
        raise ValueError("invalid datetime format")

    kw = match.groupdict()
    if kw["microsecond"]:
        kw["microsecond"] = kw["microsecond"].ljust(6, "0")

    tzinfo = _parse_timezone(kw.pop("tzinfo"))
    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
    kw_["tzinfo"] = tzinfo

    return datetime(**kw_)  # type: ignore


def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
    """
    Parse a date/int/float/string and return a datetime.date.

    Raise ValueError if the input is well formatted but not a valid date.
    Raise ValueError if the input isn't well formatted.
    """
    if isinstance(value, date):
        if isinstance(value, datetime):
            return value.date()
        else:
            return value

    number = _get_numeric(value, "date")
    if number is not None:
        return _from_unix_seconds(number).date()

    if isinstance(value, bytes):
        value = value.decode()

    assert not isinstance(value, (float, int))
    match = date_re.match(value)
    if match is None:
        raise ValueError("invalid date format")

    kw = {k: int(v) for k, v in match.groupdict().items()}

    try:
        return date(**kw)
    except ValueError:
        raise ValueError("invalid date format") from None
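A short illustration of the parsing rules above, including the `MS_WATERSHED` heuristic that treats large unix timestamps as milliseconds; a sketch assuming the vendored package is importable:

```python
from openai._utils import parse_date, parse_datetime

# ISO-8601 offsets are preserved as fixed-offset timezones
print(parse_datetime("2023-10-05T14:12:26+05:30"))
# 2023-10-05 14:12:26+05:30

# numbers are unix timestamps; values above MS_WATERSHED (2e10) are
# repeatedly divided by 1000, so seconds and milliseconds agree
print(parse_datetime(1700000000))     # 2023-11-14 22:13:20+00:00
print(parse_datetime(1700000000000))  # same instant, given in ms

print(parse_date("2023-10-05"))  # 2023-10-05
```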
openai/_utils/_logs.py
@@ -0,0 +1,42 @@
import os
import logging
from typing_extensions import override

from ._utils import is_dict

logger: logging.Logger = logging.getLogger("openai")
httpx_logger: logging.Logger = logging.getLogger("httpx")


SENSITIVE_HEADERS = {"api-key", "authorization"}


def _basic_config() -> None:
    # e.g. [2023-10-05 14:12:26 - openai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK"
    logging.basicConfig(
        format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )


def setup_logging() -> None:
    env = os.environ.get("OPENAI_LOG")
    if env == "debug":
        _basic_config()
        logger.setLevel(logging.DEBUG)
        httpx_logger.setLevel(logging.DEBUG)
    elif env == "info":
        _basic_config()
        logger.setLevel(logging.INFO)
        httpx_logger.setLevel(logging.INFO)


class SensitiveHeadersFilter(logging.Filter):
    @override
    def filter(self, record: logging.LogRecord) -> bool:
        if is_dict(record.args) and "headers" in record.args and is_dict(record.args["headers"]):
            headers = record.args["headers"] = {**record.args["headers"]}
            for header in headers:
                if str(header).lower() in SENSITIVE_HEADERS:
                    headers[header] = "<redacted>"
        return True
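The filter only rewrites `record.args` when it is a dict containing a `headers` dict, which matches how the client logs request options. A hypothetical sketch (note that `logging` stores a lone dict argument as `record.args` itself):

```python
import logging

from openai._utils import SensitiveHeadersFilter

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("openai")
logger.addFilter(SensitiveHeadersFilter())

# a single dict argument becomes `record.args`, so the filter can redact it
logger.debug("Request options: %s", {"method": "post", "headers": {"Authorization": "Bearer sk-..."}})
# logs: Request options: {'method': 'post', 'headers': {'Authorization': '<redacted>'}}
```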
openai/_utils/_proxy.py
@@ -0,0 +1,65 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Generic, TypeVar, Iterable, cast
from typing_extensions import override

T = TypeVar("T")


class LazyProxy(Generic[T], ABC):
    """Implements data methods to pretend that an instance is another instance.

    This includes forwarding attribute access and other methods.
    """

    # Note: we have to special case proxies that themselves return proxies
    # to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz`

    def __getattr__(self, attr: str) -> object:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied  # pyright: ignore
        return getattr(proxied, attr)

    @override
    def __repr__(self) -> str:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied.__class__.__name__
        return repr(self.__get_proxied__())

    @override
    def __str__(self) -> str:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return proxied.__class__.__name__
        return str(proxied)

    @override
    def __dir__(self) -> Iterable[str]:
        proxied = self.__get_proxied__()
        if isinstance(proxied, LazyProxy):
            return []
        return proxied.__dir__()

    @property  # type: ignore
    @override
    def __class__(self) -> type:  # pyright: ignore
        try:
            proxied = self.__get_proxied__()
        except Exception:
            return type(self)
        if issubclass(type(proxied), LazyProxy):
            return type(proxied)
        return proxied.__class__

    def __get_proxied__(self) -> T:
        return self.__load__()

    def __as_proxied__(self) -> T:
        """Helper method that returns the current proxy, typed as the loaded object"""
        return cast(T, self)

    @abstractmethod
    def __load__(self) -> T: ...
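A minimal sketch of subclassing `LazyProxy`: the wrapped object is only built inside `__load__`, and since the base class does no caching, `__load__` runs on every attribute access. `LazyConfig` and its contents are hypothetical:

```python
from typing_extensions import override

from openai._utils import LazyProxy


class LazyConfig(LazyProxy[dict]):
    @override
    def __load__(self) -> dict:
        print("loading...")  # stand-in for an expensive import / file read
        return {"model": "gpt-4o"}


cfg = LazyConfig()
print(cfg.get("model"))  # prints "loading..." then "gpt-4o"
print(cfg.__class__)     # <class 'dict'> -- the proxy spoofs __class__
```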
openai/_utils/_reflection.py
@@ -0,0 +1,45 @@
from __future__ import annotations

import inspect
from typing import Any, Callable


def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool:
    """Returns whether or not the given function has a specific parameter"""
    sig = inspect.signature(func)
    return arg_name in sig.parameters


def assert_signatures_in_sync(
    source_func: Callable[..., Any],
    check_func: Callable[..., Any],
    *,
    exclude_params: set[str] = set(),
    description: str = "",
) -> None:
    """Ensure that the signature of the second function matches the first."""

    check_sig = inspect.signature(check_func)
    source_sig = inspect.signature(source_func)

    errors: list[str] = []

    for name, source_param in source_sig.parameters.items():
        if name in exclude_params:
            continue

        custom_param = check_sig.parameters.get(name)
        if not custom_param:
            errors.append(f"the `{name}` param is missing")
            continue

        if custom_param.annotation != source_param.annotation:
            errors.append(
                f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}"
            )
            continue

    if errors:
        raise AssertionError(
            f"{len(errors)} errors encountered when comparing signatures{description}:\n\n" + "\n\n".join(errors)
        )
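A small sketch of the reflection helpers; `create` and `create_with_extras` are hypothetical functions:

```python
from openai._utils import function_has_argument, assert_signatures_in_sync


def create(model: str, stream: bool = False) -> None: ...


def create_with_extras(model: str, stream: bool = False, extra: int = 0) -> None: ...


print(function_has_argument(create, "stream"))  # True

# passes: every parameter of `create` exists on the checked function with the
# same annotation (extra parameters on the checked function are ignored)
assert_signatures_in_sync(create, create_with_extras)
```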
openai/_utils/_resources_proxy.py
@@ -0,0 +1,24 @@
from __future__ import annotations

from typing import Any
from typing_extensions import override

from ._proxy import LazyProxy


class ResourcesProxy(LazyProxy[Any]):
    """A proxy for the `openai.resources` module.

    This is used so that we can lazily import `openai.resources` only when
    needed *and* so that users can just import `openai` and reference `openai.resources`
    """

    @override
    def __load__(self) -> Any:
        import importlib

        mod = importlib.import_module("openai.resources")
        return mod


resources = ResourcesProxy().__as_proxied__()
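The effect is that `openai.resources` is only imported when the proxy is first touched; a sketch:

```python
from openai._utils._resources_proxy import resources

# the first attribute access triggers ResourcesProxy.__load__, which
# imports openai.resources and forwards the lookup to the real module
print(resources.__name__)  # 'openai.resources'
```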
openai/_utils/_streams.py
@@ -0,0 +1,12 @@
from typing import Any
from typing_extensions import Iterator, AsyncIterator


def consume_sync_iterator(iterator: Iterator[Any]) -> None:
    for _ in iterator:
        ...


async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None:
    async for _ in iterator:
        ...
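These helpers simply drain an iterator for its side effects (e.g. running a stream to completion); a sketch:

```python
from openai._utils import consume_sync_iterator

stream = iter([1, 2, 3])
consume_sync_iterator(stream)  # exhausts the iterator, discarding the items
print(list(stream))            # [] -- nothing left
```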
openai/_utils/_sync.py
@@ -0,0 +1,58 @@
from __future__ import annotations

import asyncio
import functools
from typing import TypeVar, Callable, Awaitable
from typing_extensions import ParamSpec

import anyio
import sniffio
import anyio.to_thread

T_Retval = TypeVar("T_Retval")
T_ParamSpec = ParamSpec("T_ParamSpec")


async def to_thread(
    func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
) -> T_Retval:
    if sniffio.current_async_library() == "asyncio":
        return await asyncio.to_thread(func, *args, **kwargs)

    return await anyio.to_thread.run_sync(
        functools.partial(func, *args, **kwargs),
    )


# inspired by `asyncer`, https://github.com/tiangolo/asyncer
def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
    """
    Take a blocking function and create an async one that receives the same
    positional and keyword arguments.

    Usage:

    ```python
    def blocking_func(arg1, arg2, kwarg1=None):
        # blocking code
        return result


    result = await asyncify(blocking_func)(arg1, arg2, kwarg1=value1)
    ```

    ## Arguments

    `function`: a blocking regular callable (e.g. a function)

    ## Return

    An async function that takes the same positional and keyword arguments as the
    original one, that when called runs the same original function in a thread worker
    and returns the result.
    """

    async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
        return await to_thread(function, *args, **kwargs)

    return wrapper
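A runnable sketch of `asyncify`: the wrapped call runs in a worker thread so the event loop is never blocked. `slow_add` is a hypothetical blocking function:

```python
import time
import asyncio

from openai._utils import asyncify


def slow_add(a: int, b: int) -> int:
    time.sleep(0.1)  # stand-in for blocking I/O
    return a + b


async def main() -> None:
    # dispatched to a thread via asyncio.to_thread (or
    # anyio.to_thread.run_sync when running under trio)
    result = await asyncify(slow_add)(1, 2)
    print(result)  # 3


asyncio.run(main())
```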
openai/_utils/_transform.py
@@ -0,0 +1,457 @@
from __future__ import annotations

import io
import base64
import pathlib
from typing import Any, Mapping, TypeVar, cast
from datetime import date, datetime
from typing_extensions import Literal, get_args, override, get_type_hints as _get_type_hints

import anyio
import pydantic

from ._utils import (
    is_list,
    is_given,
    lru_cache,
    is_mapping,
    is_iterable,
    is_sequence,
)
from .._files import is_base64_file_input
from ._compat import get_origin, is_typeddict
from ._typing import (
    is_list_type,
    is_union_type,
    extract_type_arg,
    is_iterable_type,
    is_required_type,
    is_sequence_type,
    is_annotated_type,
    strip_annotated_type,
)

_T = TypeVar("_T")


# TODO: support for drilling globals() and locals()
# TODO: ensure works correctly with forward references in all cases

PropertyFormat = Literal["iso8601", "base64", "custom"]


class PropertyInfo:
    """Metadata class to be used in Annotated types to provide information about a given type.

    For example:

    class MyParams(TypedDict):
        account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')]

    This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
    """

    alias: str | None
    format: PropertyFormat | None
    format_template: str | None
    discriminator: str | None

    def __init__(
        self,
        *,
        alias: str | None = None,
        format: PropertyFormat | None = None,
        format_template: str | None = None,
        discriminator: str | None = None,
    ) -> None:
        self.alias = alias
        self.format = format
        self.format_template = format_template
        self.discriminator = discriminator

    @override
    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')"


def maybe_transform(
    data: object,
    expected_type: object,
) -> Any | None:
    """Wrapper over `transform()` that allows `None` to be passed.

    See `transform()` for more details.
    """
    if data is None:
        return None
    return transform(data, expected_type)


# Wrapper over _transform_recursive providing fake types
def transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Transform dictionaries based off of type information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    transformed = _transform_recursive(data, annotation=cast(type, expected_type))
    return cast(_T, transformed)


@lru_cache(maxsize=8096)
def _get_annotated_type(type_: type) -> type | None:
    """If the given type is an `Annotated` type then it is returned, if not `None` is returned.

    This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
    """
    if is_required_type(type_):
        # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]`
        type_ = get_args(type_)[0]

    if is_annotated_type(type_):
        return type_

    return None


def _maybe_transform_key(key: str, type_: type) -> str:
    """Transform the given `data` based on the annotations provided in `type_`.

    Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata.
    """
    annotated_type = _get_annotated_type(type_)
    if annotated_type is None:
        # no `Annotated` definition for this type, no transformation needed
        return key

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.alias is not None:
            return annotation.alias

    return key


def _no_transform_needed(annotation: type) -> bool:
    return annotation == float or annotation == int


def _transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    from .._compat import model_dump

    if inner_type is None:
        inner_type = annotation

    stripped_type = strip_annotated_type(inner_type)
    origin = get_origin(stripped_type) or stripped_type
    if is_typeddict(stripped_type) and is_mapping(data):
        return _transform_typeddict(data, stripped_type)

    if origin == dict and is_mapping(data):
        items_type = get_args(stripped_type)[1]
        return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()}

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
        # Sequence[T]
        or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
    ):
        # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
        # intended as an iterable, so we don't transform it.
        if isinstance(data, dict):
            return cast(object, data)

        inner_type = extract_type_arg(stripped_type, 0)
        if _no_transform_needed(inner_type):
            # for some types there is no need to transform anything, so we can get a small
            # perf boost from skipping that work.
            #
            # but we still need to convert to a list to ensure the data is json-serializable
            if is_list(data):
                return data
            return list(data)

        return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = _transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        return model_dump(data, exclude_unset=True, mode="json", exclude=getattr(data, "__api_exclude__", None))

    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return _format_data(data, annotation.format, annotation.format_template)

    return data


def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
    if isinstance(data, (date, datetime)):
        if format_ == "iso8601":
            return data.isoformat()

        if format_ == "custom" and format_template is not None:
            return data.strftime(format_template)

    if format_ == "base64" and is_base64_file_input(data):
        binary: str | bytes | None = None

        if isinstance(data, pathlib.Path):
            binary = data.read_bytes()
        elif isinstance(data, io.IOBase):
            binary = data.read()

        if isinstance(binary, str):  # type: ignore[unreachable]
            binary = binary.encode()

        if not isinstance(binary, bytes):
            raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")

        return base64.b64encode(binary).decode("ascii")

    return data


def _transform_typeddict(
    data: Mapping[str, object],
    expected_type: type,
) -> Mapping[str, object]:
    result: dict[str, object] = {}
    annotations = get_type_hints(expected_type, include_extras=True)
    for key, value in data.items():
        if not is_given(value):
            # we don't need to include omitted values here as they'll
            # be stripped out before the request is sent anyway
            continue

        type_ = annotations.get(key)
        if type_ is None:
            # we do not have a type annotation for this field, leave it as is
            result[key] = value
        else:
            result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_)
    return result


async def async_maybe_transform(
    data: object,
    expected_type: object,
) -> Any | None:
    """Wrapper over `async_transform()` that allows `None` to be passed.

    See `async_transform()` for more details.
    """
    if data is None:
        return None
    return await async_transform(data, expected_type)


async def async_transform(
    data: _T,
    expected_type: object,
) -> _T:
    """Transform dictionaries based off of type information from the given type, for example:

    ```py
    class Params(TypedDict, total=False):
        card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]


    transformed = transform({"card_id": "<my card ID>"}, Params)
    # {'cardID': '<my card ID>'}
    ```

    Any keys / data that does not have type information given will be included as is.

    It should be noted that the transformations that this function does are not represented in the type system.
    """
    transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type))
    return cast(_T, transformed)


async def _async_transform_recursive(
    data: object,
    *,
    annotation: type,
    inner_type: type | None = None,
) -> object:
    """Transform the given data against the expected type.

    Args:
        annotation: The direct type annotation given to the particular piece of data.
            This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc

        inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
            is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
            the list can be transformed using the metadata from the container type.

            Defaults to the same value as the `annotation` argument.
    """
    from .._compat import model_dump

    if inner_type is None:
        inner_type = annotation

    stripped_type = strip_annotated_type(inner_type)
    origin = get_origin(stripped_type) or stripped_type
    if is_typeddict(stripped_type) and is_mapping(data):
        return await _async_transform_typeddict(data, stripped_type)

    if origin == dict and is_mapping(data):
        items_type = get_args(stripped_type)[1]
        return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()}

    if (
        # List[T]
        (is_list_type(stripped_type) and is_list(data))
        # Iterable[T]
        or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
        # Sequence[T]
        or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str))
    ):
        # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually
        # intended as an iterable, so we don't transform it.
        if isinstance(data, dict):
            return cast(object, data)

        inner_type = extract_type_arg(stripped_type, 0)
        if _no_transform_needed(inner_type):
            # for some types there is no need to transform anything, so we can get a small
            # perf boost from skipping that work.
            #
            # but we still need to convert to a list to ensure the data is json-serializable
            if is_list(data):
                return data
            return list(data)

        return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]

    if is_union_type(stripped_type):
        # For union types we run the transformation against all subtypes to ensure that everything is transformed.
        #
        # TODO: there may be edge cases where the same normalized field name will transform to two different names
        # in different subtypes.
        for subtype in get_args(stripped_type):
            data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype)
        return data

    if isinstance(data, pydantic.BaseModel):
        return model_dump(data, exclude_unset=True, mode="json")

    annotated_type = _get_annotated_type(annotation)
    if annotated_type is None:
        return data

    # ignore the first argument as it is the actual type
    annotations = get_args(annotated_type)[1:]
    for annotation in annotations:
        if isinstance(annotation, PropertyInfo) and annotation.format is not None:
            return await _async_format_data(data, annotation.format, annotation.format_template)

    return data


async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
    if isinstance(data, (date, datetime)):
        if format_ == "iso8601":
            return data.isoformat()

        if format_ == "custom" and format_template is not None:
            return data.strftime(format_template)

    if format_ == "base64" and is_base64_file_input(data):
        binary: str | bytes | None = None

        if isinstance(data, pathlib.Path):
            binary = await anyio.Path(data).read_bytes()
        elif isinstance(data, io.IOBase):
            binary = data.read()

        if isinstance(binary, str):  # type: ignore[unreachable]
            binary = binary.encode()

        if not isinstance(binary, bytes):
            raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")

        return base64.b64encode(binary).decode("ascii")

    return data


async def _async_transform_typeddict(
    data: Mapping[str, object],
    expected_type: type,
) -> Mapping[str, object]:
    result: dict[str, object] = {}
    annotations = get_type_hints(expected_type, include_extras=True)
    for key, value in data.items():
        if not is_given(value):
            # we don't need to include omitted values here as they'll
            # be stripped out before the request is sent anyway
            continue

        type_ = annotations.get(key)
        if type_ is None:
            # we do not have a type annotation for this field, leave it as is
            result[key] = value
        else:
            result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_)
    return result


@lru_cache(maxsize=8096)
def get_type_hints(
    obj: Any,
    globalns: dict[str, Any] | None = None,
    localns: Mapping[str, Any] | None = None,
    include_extras: bool = False,
) -> dict[str, Any]:
    return _get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras)
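A sketch of the whole pipeline: `PropertyInfo(alias=...)` renames keys and `PropertyInfo(format=...)` serializes values, driven entirely by the `TypedDict` annotations. `Params` here is a hypothetical request shape:

```python
from datetime import datetime

from typing_extensions import Annotated, Required, TypedDict

from openai._utils import PropertyInfo, transform


class Params(TypedDict, total=False):
    card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]
    expires_at: Annotated[datetime, PropertyInfo(format="iso8601")]


print(transform({"card_id": "abc", "expires_at": datetime(2030, 1, 1)}, Params))
# {'cardID': 'abc', 'expires_at': '2030-01-01T00:00:00'}
```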
openai/_utils/_typing.py
@@ -0,0 +1,156 @@
from __future__ import annotations

import sys
import typing
import typing_extensions
from typing import Any, TypeVar, Iterable, cast
from collections import abc as _c_abc
from typing_extensions import (
    TypeIs,
    Required,
    Annotated,
    get_args,
    get_origin,
)

from ._utils import lru_cache
from .._types import InheritsGeneric
from ._compat import is_union as _is_union


def is_annotated_type(typ: type) -> bool:
    return get_origin(typ) == Annotated


def is_list_type(typ: type) -> bool:
    return (get_origin(typ) or typ) == list


def is_sequence_type(typ: type) -> bool:
    origin = get_origin(typ) or typ
    return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence


def is_iterable_type(typ: type) -> bool:
    """If the given type is `typing.Iterable[T]`"""
    origin = get_origin(typ) or typ
    return origin == Iterable or origin == _c_abc.Iterable


def is_union_type(typ: type) -> bool:
    return _is_union(get_origin(typ))


def is_required_type(typ: type) -> bool:
    return get_origin(typ) == Required


def is_typevar(typ: type) -> bool:
    # type ignore is required because type checkers
    # think this expression will always return False
    return type(typ) == TypeVar  # type: ignore


_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,)
if sys.version_info >= (3, 12):
    _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType)


def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]:
    """Return whether the provided argument is an instance of `TypeAliasType`.

    ```python
    type Int = int
    is_type_alias_type(Int)
    # > True
    Str = TypeAliasType("Str", str)
    is_type_alias_type(Str)
    # > True
    ```
    """
    return isinstance(tp, _TYPE_ALIAS_TYPES)


# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
@lru_cache(maxsize=8096)
def strip_annotated_type(typ: type) -> type:
    if is_required_type(typ) or is_annotated_type(typ):
        return strip_annotated_type(cast(type, get_args(typ)[0]))

    return typ


def extract_type_arg(typ: type, index: int) -> type:
    args = get_args(typ)
    try:
        return cast(type, args[index])
    except IndexError as err:
        raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err


def extract_type_var_from_base(
    typ: type,
    *,
    generic_bases: tuple[type, ...],
    index: int,
    failure_message: str | None = None,
) -> type:
    """Given a type like `Foo[T]`, returns the generic type variable `T`.

    This also handles the case where a concrete subclass is given, e.g.
    ```py
    class MyResponse(Foo[bytes]):
        ...

    extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes
    ```

    And where a generic subclass is given:
    ```py
    _T = TypeVar('_T')
    class MyResponse(Foo[_T]):
        ...

    extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes
    ```
    """
    cls = cast(object, get_origin(typ) or typ)
    if cls in generic_bases:  # pyright: ignore[reportUnnecessaryContains]
        # we're given the class directly
        return extract_type_arg(typ, index)

    # if a subclass is given
    # ---
    # this is needed as __orig_bases__ is not present in the typeshed stubs
    # because it is intended to be for internal use only, however there does
    # not seem to be a way to resolve generic TypeVars for inherited subclasses
    # without using it.
    if isinstance(cls, InheritsGeneric):
        target_base_class: Any | None = None
        for base in cls.__orig_bases__:
            if base.__origin__ in generic_bases:
                target_base_class = base
                break

        if target_base_class is None:
            raise RuntimeError(
                "Could not find the generic base class;\n"
                "This should never happen;\n"
                f"Does {cls} inherit from one of {generic_bases} ?"
            )

        extracted = extract_type_arg(target_base_class, index)
        if is_typevar(extracted):
            # If the extracted type argument is itself a type variable
            # then that means the subclass itself is generic, so we have
            # to resolve the type argument from the class itself, not
            # the base class.
            #
            # Note: if there is more than 1 type argument, the subclass could
            # change the ordering of the type arguments, this is not currently
            # supported.
            return extract_type_arg(typ, index)

        return extracted

    raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}")
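A sketch of resolving a generic type argument from a subclass, the main job of `extract_type_var_from_base`; `Response` and `BytesResponse` are hypothetical classes:

```python
from typing import Generic, List, TypeVar

from openai._utils import extract_type_var_from_base, is_list_type

T = TypeVar("T")


class Response(Generic[T]): ...


class BytesResponse(Response[bytes]): ...


# resolved via __orig_bases__ on the concrete subclass
print(extract_type_var_from_base(BytesResponse, generic_bases=(Response,), index=0))
# <class 'bytes'>
print(is_list_type(List[int]))  # True
```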
openai/_utils/_utils.py
@@ -0,0 +1,437 @@
from __future__ import annotations

import os
import re
import inspect
import functools
from typing import (
    TYPE_CHECKING,
    Any,
    Tuple,
    Mapping,
    TypeVar,
    Callable,
    Iterable,
    Sequence,
    cast,
    overload,
)
from pathlib import Path
from datetime import date, datetime
from typing_extensions import TypeGuard

import sniffio

from .._types import Omit, NotGiven, FileTypes, HeadersLike

_T = TypeVar("_T")
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
_MappingT = TypeVar("_MappingT", bound=Mapping[str, object])
_SequenceT = TypeVar("_SequenceT", bound=Sequence[object])
CallableT = TypeVar("CallableT", bound=Callable[..., Any])

if TYPE_CHECKING:
    from ..lib.azure import AzureOpenAI, AsyncAzureOpenAI


def flatten(t: Iterable[Iterable[_T]]) -> list[_T]:
    return [item for sublist in t for item in sublist]


def extract_files(
    # TODO: this needs to take Dict but variance issues.....
    # create protocol type ?
    query: Mapping[str, object],
    *,
    paths: Sequence[Sequence[str]],
) -> list[tuple[str, FileTypes]]:
    """Recursively extract files from the given dictionary based on specified paths.

    A path may look like this ['foo', 'files', '<array>', 'data'].

    Note: this mutates the given dictionary.
    """
    files: list[tuple[str, FileTypes]] = []
    for path in paths:
        files.extend(_extract_items(query, path, index=0, flattened_key=None))
    return files


def _extract_items(
    obj: object,
    path: Sequence[str],
    *,
    index: int,
    flattened_key: str | None,
) -> list[tuple[str, FileTypes]]:
    try:
        key = path[index]
    except IndexError:
        if not is_given(obj):
            # no value was provided - we can safely ignore
            return []

        # cyclical import
        from .._files import assert_is_file_content

        # We have exhausted the path, return the entry we found.
        assert flattened_key is not None

        if is_list(obj):
            files: list[tuple[str, FileTypes]] = []
            for entry in obj:
                assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "")
                files.append((flattened_key + "[]", cast(FileTypes, entry)))
            return files

        assert_is_file_content(obj, key=flattened_key)
        return [(flattened_key, cast(FileTypes, obj))]

    index += 1
    if is_dict(obj):
        try:
            # We are at the last entry in the path so we must remove the field
            if (len(path)) == index:
                item = obj.pop(key)
            else:
                item = obj[key]
        except KeyError:
            # Key was not present in the dictionary, this is not indicative of an error
            # as the given path may not point to a required field. We also do not want
            # to enforce required fields as the API may differ from the spec in some cases.
            return []
        if flattened_key is None:
            flattened_key = key
        else:
            flattened_key += f"[{key}]"
        return _extract_items(
            item,
            path,
            index=index,
            flattened_key=flattened_key,
        )
    elif is_list(obj):
        if key != "<array>":
            return []

        return flatten(
            [
                _extract_items(
                    item,
                    path,
                    index=index,
                    flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
                )
                for item in obj
            ]
        )

    # Something unexpected was passed, just ignore it.
    return []


def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]:
    return not isinstance(obj, NotGiven) and not isinstance(obj, Omit)


# Type safe methods for narrowing types with TypeVars.
# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown],
# however this causes Pyright to rightfully report errors. As we know we don't
# care about the contained types we can safely use `object` in its place.
#
# There are two separate functions defined, `is_*` and `is_*_t` for different use cases.
# `is_*` is for when you're dealing with an unknown input
# `is_*_t` is for when you're narrowing a known union type to a specific subset


def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]:
    return isinstance(obj, tuple)


def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]:
    return isinstance(obj, tuple)


def is_sequence(obj: object) -> TypeGuard[Sequence[object]]:
    return isinstance(obj, Sequence)


def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]:
    return isinstance(obj, Sequence)


def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]:
    return isinstance(obj, Mapping)


def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]:
    return isinstance(obj, Mapping)


def is_dict(obj: object) -> TypeGuard[dict[object, object]]:
    return isinstance(obj, dict)


def is_list(obj: object) -> TypeGuard[list[object]]:
    return isinstance(obj, list)


def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
    return isinstance(obj, Iterable)


def deepcopy_minimal(item: _T) -> _T:
    """Minimal reimplementation of copy.deepcopy() that will only copy certain object types:

    - mappings, e.g. `dict`
    - list

    This is done for performance reasons.
    """
    if is_mapping(item):
        return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()})
    if is_list(item):
        return cast(_T, [deepcopy_minimal(entry) for entry in item])
    return item


# copied from https://github.com/Rapptz/RoboDanny
def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
    size = len(seq)
    if size == 0:
        return ""

    if size == 1:
        return seq[0]

    if size == 2:
        return f"{seq[0]} {final} {seq[1]}"

    return delim.join(seq[:-1]) + f" {final} {seq[-1]}"


def quote(string: str) -> str:
    """Add single quotation marks around the given string. Does *not* do any escaping."""
    return f"'{string}'"


def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]:
    """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function.

    Useful for enforcing runtime validation of overloaded functions.

    Example usage:
    ```py
    @overload
    def foo(*, a: str) -> str: ...


    @overload
    def foo(*, b: bool) -> str: ...


    # This enforces the same constraints that a static type checker would
    # i.e. that either a or b must be passed to the function
    @required_args(["a"], ["b"])
    def foo(*, a: str | None = None, b: bool | None = None) -> str: ...
    ```
    """

    def inner(func: CallableT) -> CallableT:
        params = inspect.signature(func).parameters
        positional = [
            name
            for name, param in params.items()
            if param.kind
            in {
                param.POSITIONAL_ONLY,
                param.POSITIONAL_OR_KEYWORD,
            }
        ]

        @functools.wraps(func)
        def wrapper(*args: object, **kwargs: object) -> object:
            given_params: set[str] = set()
            for i, _ in enumerate(args):
                try:
                    given_params.add(positional[i])
                except IndexError:
                    raise TypeError(
                        f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given"
                    ) from None

            for key in kwargs.keys():
                given_params.add(key)

            for variant in variants:
                matches = all((param in given_params for param in variant))
                if matches:
                    break
            else:  # no break
                if len(variants) > 1:
                    variations = human_join(
                        ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants]
                    )
                    msg = f"Missing required arguments; Expected either {variations} arguments to be given"
                else:
                    assert len(variants) > 0

                    # TODO: this error message is not deterministic
                    missing = list(set(variants[0]) - given_params)
                    if len(missing) > 1:
                        msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}"
                    else:
                        msg = f"Missing required argument: {quote(missing[0])}"
                raise TypeError(msg)
            return func(*args, **kwargs)

        return wrapper  # type: ignore

    return inner


_K = TypeVar("_K")
_V = TypeVar("_V")


@overload
def strip_not_given(obj: None) -> None: ...


@overload
def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ...


@overload
def strip_not_given(obj: object) -> object: ...


def strip_not_given(obj: object | None) -> object:
    """Remove all top-level keys where their values are instances of `NotGiven`"""
    if obj is None:
        return None

    if not is_mapping(obj):
        return obj

    return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)}


def coerce_integer(val: str) -> int:
    return int(val, base=10)


def coerce_float(val: str) -> float:
    return float(val)


def coerce_boolean(val: str) -> bool:
    return val == "true" or val == "1" or val == "on"


def maybe_coerce_integer(val: str | None) -> int | None:
    if val is None:
        return None
    return coerce_integer(val)


def maybe_coerce_float(val: str | None) -> float | None:
    if val is None:
        return None
    return coerce_float(val)


def maybe_coerce_boolean(val: str | None) -> bool | None:
    if val is None:
        return None
    return coerce_boolean(val)


def removeprefix(string: str, prefix: str) -> str:
    """Remove a prefix from a string.

    Backport of `str.removeprefix` for Python < 3.9
    """
    if string.startswith(prefix):
        return string[len(prefix) :]
    return string


def removesuffix(string: str, suffix: str) -> str:
    """Remove a suffix from a string.

    Backport of `str.removesuffix` for Python < 3.9
    """
    if string.endswith(suffix):
        return string[: -len(suffix)]
    return string


def file_from_path(path: str) -> FileTypes:
    contents = Path(path).read_bytes()
    file_name = os.path.basename(path)
    return (file_name, contents)


def get_required_header(headers: HeadersLike, header: str) -> str:
    lower_header = header.lower()
    if is_mapping_t(headers):
        # mypy doesn't understand the type narrowing here
        for k, v in headers.items():  # type: ignore
            if k.lower() == lower_header and isinstance(v, str):
                return v

    # to deal with the case where the header looks like Stainless-Event-Id
    intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize())

    for normalized_header in [header, lower_header, header.upper(), intercaps_header]:
        value = headers.get(normalized_header)
        if value:
            return value

    raise ValueError(f"Could not find {header} header")


def get_async_library() -> str:
    try:
        return sniffio.current_async_library()
    except Exception:
        return "false"


def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
    """A version of functools.lru_cache that retains the type signature
    for the wrapped function arguments.
    """
    wrapper = functools.lru_cache(  # noqa: TID251
        maxsize=maxsize,
    )
    return cast(Any, wrapper)  # type: ignore[no-any-return]


def json_safe(data: object) -> object:
    """Translates a mapping / sequence recursively in the same fashion
    as `pydantic` v2's `model_dump(mode="json")`.
    """
    if is_mapping(data):
        return {json_safe(key): json_safe(value) for key, value in data.items()}

    if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)):
        return [json_safe(item) for item in data]

    if isinstance(data, (datetime, date)):
        return data.isoformat()

    return data


def is_azure_client(client: object) -> TypeGuard[AzureOpenAI]:
    from ..lib.azure import AzureOpenAI

    return isinstance(client, AzureOpenAI)


def is_async_azure_client(client: object) -> TypeGuard[AsyncAzureOpenAI]:
    from ..lib.azure import AsyncAzureOpenAI

    return isinstance(client, AsyncAzureOpenAI)
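A sketch of the file-extraction helper in isolation: matching entries are popped out of the (minimally deep-copied) body so they can be sent as multipart fields. The body contents here are hypothetical:

```python
from openai._utils import deepcopy_minimal, extract_files

body = deepcopy_minimal(
    {
        "purpose": "fine-tune",
        "file": b"raw jsonl bytes",
    }
)
files = extract_files(body, paths=[["file"]])
print(files)  # [('file', b'raw jsonl bytes')]
print(body)   # {'purpose': 'fine-tune'} -- the file entry was popped out
```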