chore: 添加虚拟环境到仓库
- 添加 backend_service/venv 虚拟环境 - 包含所有Python依赖包 - 注意:虚拟环境约393MB,包含12655个文件
This commit is contained in:
@@ -0,0 +1,39 @@
|
||||
"""
|
||||
build - A simple, correct Python build frontend
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from ._builder import ProjectBuilder
|
||||
from ._exceptions import (
|
||||
BuildBackendException,
|
||||
BuildException,
|
||||
BuildSystemTableValidationError,
|
||||
FailedProcessError,
|
||||
TypoWarning,
|
||||
)
|
||||
from ._types import ConfigSettings as ConfigSettingsType
|
||||
from ._types import Distribution as DistributionType
|
||||
from ._types import SubprocessRunner as RunnerType
|
||||
from ._util import check_dependency
|
||||
|
||||
|
||||
# Package version; keep in sync with the distribution metadata.
__version__ = '1.3.0'


# Public API. Types imported from private modules are re-exported under
# stable ``*Type`` aliases.
__all__ = [
    'BuildBackendException',
    'BuildException',
    'BuildSystemTableValidationError',
    'ConfigSettingsType',
    'DistributionType',
    'FailedProcessError',
    'ProjectBuilder',
    'RunnerType',
    'TypoWarning',
    '__version__',
    'check_dependency',
]
|
||||
|
||||
|
||||
def __dir__() -> list[str]:
    """Restrict ``dir(build)`` to the declared public API."""
    return __all__
|
||||
@@ -0,0 +1,486 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import contextlib
|
||||
import contextvars
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import textwrap
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
from collections.abc import Iterator, Sequence
|
||||
from functools import partial
|
||||
from typing import NoReturn, TextIO
|
||||
|
||||
import build
|
||||
|
||||
from . import ProjectBuilder, _ctx
|
||||
from . import env as _env
|
||||
from ._exceptions import BuildBackendException, BuildException, FailedProcessError
|
||||
from ._types import ConfigSettings, Distribution, StrPath
|
||||
from .env import DefaultIsolatedEnv
|
||||
|
||||
|
||||
_COLORS = {
|
||||
'red': '\33[91m',
|
||||
'green': '\33[92m',
|
||||
'yellow': '\33[93m',
|
||||
'bold': '\33[1m',
|
||||
'dim': '\33[2m',
|
||||
'underline': '\33[4m',
|
||||
'reset': '\33[0m',
|
||||
}
|
||||
_NO_COLORS = dict.fromkeys(_COLORS, '')
|
||||
|
||||
|
||||
_styles = contextvars.ContextVar('_styles', default=_COLORS)
|
||||
|
||||
|
||||
def _init_colors() -> None:
    """Disable ANSI styling unless the environment requests (or supports) it.

    ``NO_COLOR`` always wins; otherwise ``FORCE_COLOR`` or an attached tty
    keeps the default color table active.
    """
    no_color = 'NO_COLOR' in os.environ
    force_color = 'FORCE_COLOR' in os.environ
    if no_color:
        if force_color:
            warnings.warn('Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color', stacklevel=2)
        _styles.set(_NO_COLORS)
    elif force_color or sys.stdout.isatty():
        return
    _styles.set(_NO_COLORS)
|
||||
|
||||
|
||||
def _cprint(fmt: str = '', msg: str = '', file: TextIO | None = None) -> None:
    """Format *msg* into *fmt* with the active style codes and print it, flushing."""
    rendered = fmt.format(msg, **_styles.get())
    print(rendered, file=file, flush=True)
|
||||
|
||||
|
||||
def _showwarning(
    message: Warning | str,
    category: type[Warning],
    filename: str,
    lineno: int,
    file: TextIO | None = None,
    line: str | None = None,
) -> None:  # pragma: no cover
    """Replacement for ``warnings.showwarning`` that prints a colored WARNING line.

    Only ``message`` is used; the remaining parameters exist to satisfy the
    ``warnings.showwarning`` signature.
    """
    _cprint('{yellow}WARNING{reset} {}', str(message))
|
||||
|
||||
|
||||
# Usable terminal width for wrapped output, leaving a small right margin.
_max_terminal_width = shutil.get_terminal_size().columns - 2
if _max_terminal_width <= 0:  # pragma: no cover
    # get_terminal_size() can report zero columns (e.g. output is not a tty).
    _max_terminal_width = 78


# Wrap helper for log output; continuation lines are indented.
_fill = partial(textwrap.fill, subsequent_indent=' ', width=_max_terminal_width)
|
||||
|
||||
|
||||
def _log(message: str, *, origin: tuple[str, ...] | None = None) -> None:
    """Render a log message for the CLI.

    Messages without an origin are frontend messages: the first line gets a
    bold '* ' bullet and the rest are indented. Messages originating from a
    subprocess are dimmed, prefixed with '> ' (commands) or '< ' (output),
    and stderr output is routed to ``sys.stderr``.
    """
    if origin is None:
        first, *rest = message.splitlines()
        _cprint('{bold}{}{reset}', _fill(first, initial_indent='* '))
        for continuation in rest:
            print(_fill(continuation, initial_indent=' '))

    elif origin[0] == 'subprocess':
        prefix = '> ' if origin[1] == 'cmd' else '< '
        target = sys.stderr if origin[1] == 'stderr' else None
        for output_line in message.splitlines():
            _cprint('{dim}{}{reset}', _fill(output_line, initial_indent=prefix), file=target)
|
||||
|
||||
|
||||
def _setup_cli(*, verbosity: int) -> None:
    """Configure the CLI process: warning rendering, colors, and logging context.

    :param verbosity: Verbosity level stored in the ``_ctx`` context
    """
    warnings.showwarning = _showwarning

    if platform.system() == 'Windows':
        try:
            import colorama

            colorama.init()
        except ModuleNotFoundError:
            # colorama is optional; without it ANSI codes may not render on Windows.
            pass

    _init_colors()

    # Route package logging through the CLI renderer at the requested verbosity.
    _ctx.LOGGER.set(_log)
    _ctx.VERBOSITY.set(verbosity)
|
||||
|
||||
|
||||
def _error(msg: str, code: int = 1) -> NoReturn:  # pragma: no cover
    """
    Print an error message and exit. Will color the output when writing to a TTY.

    :param msg: Error message
    :param code: Error code
    :raises SystemExit: always, with the given exit code
    """
    _cprint('{red}ERROR{reset} {}', msg)
    raise SystemExit(code)
|
||||
|
||||
|
||||
def _format_dep_chain(dep_chain: Sequence[str]) -> str:
|
||||
return ' -> '.join(dep.partition(';')[0].strip() for dep in dep_chain)
|
||||
|
||||
|
||||
def _build_in_isolated_env(
    srcdir: StrPath,
    outdir: StrPath,
    distribution: Distribution,
    config_settings: ConfigSettings | None,
    installer: _env.Installer,
) -> str:
    """Build *distribution* inside a freshly created isolated environment.

    :param srcdir: Source directory
    :param outdir: Output directory
    :param distribution: Distribution to build (``sdist`` or ``wheel``)
    :param config_settings: Configuration settings passed to the backend
    :param installer: Installer used to populate the isolated environment
    :returns: The path of the built artifact
    """
    with DefaultIsolatedEnv(installer=installer) as env:
        builder = ProjectBuilder.from_isolated_env(env, srcdir)
        # first install the build dependencies
        env.install(builder.build_system_requires)
        # then get the extra required dependencies from the backend (which was installed in the call above :P)
        env.install(builder.get_requires_for_build(distribution, config_settings or {}))
        return builder.build(distribution, outdir, config_settings or {})
|
||||
|
||||
|
||||
def _build_in_current_env(
    srcdir: StrPath,
    outdir: StrPath,
    distribution: Distribution,
    config_settings: ConfigSettings | None,
    skip_dependency_check: bool = False,
) -> str:
    """Build *distribution* using the already-running interpreter's environment.

    :param srcdir: Source directory
    :param outdir: Output directory
    :param distribution: Distribution to build (``sdist`` or ``wheel``)
    :param config_settings: Configuration settings passed to the backend
    :param skip_dependency_check: Do not verify that build dependencies are installed
    :returns: The path of the built artifact
    """
    builder = ProjectBuilder(srcdir)

    if not skip_dependency_check:
        missing = builder.check_dependencies(distribution, config_settings or {})
        if missing:
            # Each entry is a dependency tuple: the unmet requirement followed
            # by the chain of requirements that pulled it in.
            dependencies = ''.join('\n\t' + dep for deps in missing for dep in (deps[0], _format_dep_chain(deps[1:])) if dep)
            _cprint()
            _error(f'Missing dependencies:{dependencies}')

    return builder.build(distribution, outdir, config_settings or {})
|
||||
|
||||
|
||||
def _build(
    isolation: bool,
    srcdir: StrPath,
    outdir: StrPath,
    distribution: Distribution,
    config_settings: ConfigSettings | None,
    skip_dependency_check: bool,
    installer: _env.Installer,
) -> str:
    """Dispatch the build to an isolated or in-place environment.

    :param isolation: When true, build inside a fresh isolated environment
    :returns: The path of the built artifact
    """
    if not isolation:
        return _build_in_current_env(srcdir, outdir, distribution, config_settings, skip_dependency_check)
    return _build_in_isolated_env(srcdir, outdir, distribution, config_settings, installer)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _handle_build_error() -> Iterator[None]:
    """
    Context manager that converts build failures into CLI error output.

    Build and subprocess-process errors exit with a plain message; unexpected
    backend errors additionally print a dimmed traceback before exiting.
    """
    try:
        yield
    except (BuildException, FailedProcessError) as e:
        _error(str(e))
    except BuildBackendException as e:
        if isinstance(e.exception, subprocess.CalledProcessError):
            # The subprocess already echoed its own output; _error raises
            # SystemExit, so the traceback path below is skipped.
            _cprint()
            _error(str(e))

        if e.exc_info:
            # limit=-1 keeps only the innermost frame of the backend traceback.
            tb_lines = traceback.format_exception(
                e.exc_info[0],
                e.exc_info[1],
                e.exc_info[2],
                limit=-1,
            )
            tb = ''.join(tb_lines)
        else:  # pragma: no cover
            tb = traceback.format_exc(-1)
        _cprint('\n{dim}{}{reset}\n', tb.strip('\n'))
        _error(str(e))
    except Exception as e:  # pragma: no cover
        tb = traceback.format_exc().strip('\n')
        _cprint('\n{dim}{}{reset}\n', tb)
        _error(str(e))
|
||||
|
||||
|
||||
def _natural_language_list(elements: Sequence[str]) -> str:
|
||||
if len(elements) == 0:
|
||||
msg = 'no elements'
|
||||
raise IndexError(msg)
|
||||
elif len(elements) == 1:
|
||||
return elements[0]
|
||||
else:
|
||||
return '{} and {}'.format(
|
||||
', '.join(elements[:-1]),
|
||||
elements[-1],
|
||||
)
|
||||
|
||||
|
||||
def build_package(
    srcdir: StrPath,
    outdir: StrPath,
    distributions: Sequence[Distribution],
    config_settings: ConfigSettings | None = None,
    isolation: bool = True,
    skip_dependency_check: bool = False,
    installer: _env.Installer = 'pip',
) -> Sequence[str]:
    """
    Run the build process.

    :param srcdir: Source directory
    :param outdir: Output directory
    :param distributions: Distributions to build (sdist and/or wheel)
    :param config_settings: Configuration settings to be passed to the backend
    :param isolation: Isolate the build in a separate environment
    :param skip_dependency_check: Do not perform the dependency check
    :param installer: Python package installer used for isolated builds
    :returns: The basenames of the built artifacts, in build order
    """
    built: list[str] = []
    for distribution in distributions:
        out = _build(isolation, srcdir, outdir, distribution, config_settings, skip_dependency_check, installer)
        built.append(os.path.basename(out))
    return built
|
||||
|
||||
|
||||
def build_package_via_sdist(
    srcdir: StrPath,
    outdir: StrPath,
    distributions: Sequence[Distribution],
    config_settings: ConfigSettings | None = None,
    isolation: bool = True,
    skip_dependency_check: bool = False,
    installer: _env.Installer = 'pip',
) -> Sequence[str]:
    """
    Build a sdist and then the specified distributions from it.

    :param srcdir: Source directory
    :param outdir: Output directory
    :param distributions: Distributions to build (only wheel)
    :param config_settings: Configuration settings to be passed to the backend
    :param isolation: Isolate the build in a separate environment
    :param skip_dependency_check: Do not perform the dependency check
    :param installer: Python package installer used for isolated builds
    :returns: The sdist basename followed by the other built artifacts
    :raises ValueError: if ``'sdist'`` is among *distributions*
    """
    from ._compat import tarfile

    if 'sdist' in distributions:
        msg = 'Only binary distributions are allowed but sdist was specified'
        raise ValueError(msg)

    sdist = _build(isolation, srcdir, outdir, 'sdist', config_settings, skip_dependency_check, installer)

    sdist_name = os.path.basename(sdist)
    sdist_out = tempfile.mkdtemp(prefix='build-via-sdist-')
    built: list[str] = []
    if distributions:
        # extract sdist
        with tarfile.TarFile.open(sdist) as t:
            t.extractall(sdist_out)
        try:
            _ctx.log(f'Building {_natural_language_list(distributions)} from sdist')
            # Build from the extracted sdist tree, not the original srcdir.
            srcdir = os.path.join(sdist_out, sdist_name[: -len('.tar.gz')])
            for distribution in distributions:
                out = _build(isolation, srcdir, outdir, distribution, config_settings, skip_dependency_check, installer)
                built.append(os.path.basename(out))
        finally:
            shutil.rmtree(sdist_out, ignore_errors=True)
    return [sdist_name, *built]
|
||||
|
||||
|
||||
def main_parser() -> argparse.ArgumentParser:
    """
    Construct the main parser.

    :returns: An ``argparse.ArgumentParser`` configured with all CLI options
    """
    formatter_class = partial(argparse.RawDescriptionHelpFormatter, width=min(_max_terminal_width, 127))
    # Workaround for 3.14.0 beta 1, can remove once beta 2 is out
    if sys.version_info >= (3, 14):
        formatter_class = partial(formatter_class, color=True)

    make_parser = partial(
        argparse.ArgumentParser,
        description=textwrap.indent(
            textwrap.dedent(
                """
                A simple, correct Python build frontend.

                By default, a source distribution (sdist) is built from {srcdir}
                and a binary distribution (wheel) is built from the sdist.
                This is recommended as it will ensure the sdist can be used
                to build wheels.

                Pass -s/--sdist and/or -w/--wheel to build a specific distribution.
                If you do this, the default behavior will be disabled, and all
                artifacts will be built from {srcdir} (even if you combine
                -w/--wheel with -s/--sdist, the wheel will be built from {srcdir}).
                """
            ).strip(),
            ' ',
        ),
        # Prevent argparse from taking up the entire width of the terminal window
        # which impedes readability. Also keep the description formatted.
        formatter_class=formatter_class,
    )
    if sys.version_info >= (3, 14):
        make_parser = partial(make_parser, suggest_on_error=True, color=True)

    parser = make_parser()
    parser.add_argument(
        'srcdir',
        type=str,
        nargs='?',
        default=os.getcwd(),
        help='source directory (defaults to current directory)',
    )
    parser.add_argument(
        '--version',
        '-V',
        action='version',
        version=f'build {build.__version__} ({",".join(build.__path__)})',
    )
    parser.add_argument(
        '--verbose',
        '-v',
        dest='verbosity',
        action='count',
        default=0,
        help='increase verbosity',
    )
    parser.add_argument(
        '--sdist',
        '-s',
        dest='distributions',
        action='append_const',
        const='sdist',
        help='build a source distribution (disables the default behavior)',
    )
    parser.add_argument(
        '--wheel',
        '-w',
        dest='distributions',
        action='append_const',
        const='wheel',
        help='build a wheel (disables the default behavior)',
    )
    parser.add_argument(
        '--outdir',
        '-o',
        type=str,
        help=f'output directory (defaults to {{srcdir}}{os.sep}dist)',
        metavar='PATH',
    )
    parser.add_argument(
        '--skip-dependency-check',
        '-x',
        action='store_true',
        help='do not check that build dependencies are installed',
    )
    env_group = parser.add_mutually_exclusive_group()
    env_group.add_argument(
        '--no-isolation',
        '-n',
        action='store_true',
        help='disable building the project in an isolated virtual environment. '
        'Build dependencies must be installed separately when this option is used',
    )
    env_group.add_argument(
        '--installer',
        choices=_env.INSTALLERS,
        # BUGFIX: without an explicit default, args.installer is None and main()
        # forwards it straight into the isolated-env machinery; the help text
        # already promises pip as the default.
        default='pip',
        help='Python package installer to use (defaults to pip)',
    )
    config_group = parser.add_mutually_exclusive_group()
    config_group.add_argument(
        '--config-setting',
        '-C',
        dest='config_settings',
        action='append',
        help='settings to pass to the backend. Multiple settings can be provided. '
        'Settings beginning with a hyphen will erroneously be interpreted as options to build if separated '
        'by a space character; use ``--config-setting=--my-setting -C--my-other-setting``',
        metavar='KEY[=VALUE]',
    )
    config_group.add_argument(
        '--config-json',
        dest='config_json',
        help='settings to pass to the backend as a JSON object. '
        'This is an alternative to --config-setting that allows complex nested structures. '
        'Cannot be used together with --config-setting',
        metavar='JSON_STRING',
    )

    return parser
|
||||
|
||||
|
||||
def main(cli_args: Sequence[str], prog: str | None = None) -> None:
    """
    Parse the CLI arguments and invoke the build process.

    :param cli_args: CLI arguments
    :param prog: Program name to show in help text
    """
    parser = main_parser()
    if prog:
        parser.prog = prog
    args = parser.parse_args(cli_args)

    _setup_cli(verbosity=args.verbosity)

    config_settings = {}

    # Handle --config-json
    if args.config_json:
        try:
            config_settings = json.loads(args.config_json)
            if not isinstance(config_settings, dict):
                _error('--config-json must contain a JSON object (dict), not a list or primitive value')
        except json.JSONDecodeError as e:
            _error(f'Invalid JSON in --config-json: {e}')

    # Handle --config-setting (original logic)
    elif args.config_settings:
        for arg in args.config_settings:
            setting, _, value = arg.partition('=')
            if setting not in config_settings:
                config_settings[setting] = value
            else:
                # A repeated key accumulates its values into a list.
                if not isinstance(config_settings[setting], list):
                    config_settings[setting] = [config_settings[setting]]

                config_settings[setting].append(value)

    # outdir is relative to srcdir only if omitted.
    outdir = os.path.join(args.srcdir, 'dist') if args.outdir is None else args.outdir

    distributions: list[Distribution] = args.distributions
    if distributions:
        build_call = build_package
    else:
        # Default behavior: build an sdist, then build the wheel from it.
        build_call = build_package_via_sdist
        distributions = ['wheel']

    with _handle_build_error():
        built = build_call(
            args.srcdir,
            outdir,
            distributions,
            config_settings,
            not args.no_isolation,
            args.skip_dependency_check,
            args.installer,
        )
        artifact_list = _natural_language_list(
            ['{underline}{}{reset}{bold}{green}'.format(artifact, **_styles.get()) for artifact in built]
        )
        _cprint('{bold}{green}Successfully built {}{reset}', artifact_list)
|
||||
|
||||
|
||||
def entrypoint() -> None:
    """Console-script entry point: run the CLI with the process arguments."""
    main(sys.argv[1:])
|
||||
|
||||
|
||||
if __name__ == '__main__':  # pragma: no cover
    # Invoked via ``python -m build``; override prog so help shows the right name.
    main(sys.argv[1:], 'python -m build')
|
||||
|
||||
|
||||
# Public API of the CLI module.
__all__ = [
    'main',
    'main_parser',
]
|
||||
@@ -0,0 +1,355 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import difflib
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import warnings
|
||||
import zipfile
|
||||
|
||||
from collections.abc import Iterator, Mapping, Sequence
|
||||
from typing import Any, TypeVar
|
||||
|
||||
import pyproject_hooks
|
||||
|
||||
from . import _ctx, env
|
||||
from ._compat import tomllib
|
||||
from ._exceptions import (
|
||||
BuildBackendException,
|
||||
BuildException,
|
||||
BuildSystemTableValidationError,
|
||||
TypoWarning,
|
||||
)
|
||||
from ._types import ConfigSettings, Distribution, StrPath, SubprocessRunner
|
||||
from ._util import check_dependency, parse_wheel_filename
|
||||
|
||||
|
||||
_TProjectBuilder = TypeVar('_TProjectBuilder', bound='ProjectBuilder')
|
||||
|
||||
|
||||
_DEFAULT_BACKEND = {
|
||||
'build-backend': 'setuptools.build_meta:__legacy__',
|
||||
'requires': ['setuptools >= 40.8.0'],
|
||||
}
|
||||
|
||||
|
||||
def _find_typo(dictionary: Mapping[str, str], expected: str) -> None:
    """Emit a ``TypoWarning`` for every key that closely resembles *expected*.

    Similarity is measured with ``difflib.SequenceMatcher`` at a 0.8 threshold.
    """
    for candidate in dictionary:
        similarity = difflib.SequenceMatcher(None, expected, candidate).ratio()
        if similarity < 0.8:
            continue
        warnings.warn(
            f"Found '{candidate}' in pyproject.toml, did you mean '{expected}'?",
            TypoWarning,
            stacklevel=2,
        )
|
||||
|
||||
|
||||
def _validate_source_directory(source_dir: StrPath) -> None:
|
||||
if not os.path.isdir(source_dir):
|
||||
msg = f'Source {source_dir} is not a directory'
|
||||
raise BuildException(msg)
|
||||
pyproject_toml = os.path.join(source_dir, 'pyproject.toml')
|
||||
setup_py = os.path.join(source_dir, 'setup.py')
|
||||
if not os.path.exists(pyproject_toml) and not os.path.exists(setup_py):
|
||||
msg = f'Source {source_dir} does not appear to be a Python project: no pyproject.toml or setup.py'
|
||||
raise BuildException(msg)
|
||||
|
||||
|
||||
def _read_pyproject_toml(path: StrPath) -> Mapping[str, Any]:
|
||||
try:
|
||||
with open(path, 'rb') as f:
|
||||
return tomllib.loads(f.read().decode())
|
||||
except FileNotFoundError:
|
||||
return {}
|
||||
except PermissionError as e:
|
||||
msg = f"{e.strerror}: '{e.filename}' "
|
||||
raise BuildException(msg) from None
|
||||
except tomllib.TOMLDecodeError as e:
|
||||
msg = f'Failed to parse {path}: {e} '
|
||||
raise BuildException(msg) from None
|
||||
|
||||
|
||||
def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Mapping[str, Any]:
    """Validate and normalize the ``[build-system]`` table of a ``pyproject.toml``.

    :param pyproject_toml: The parsed ``pyproject.toml`` contents (may be empty)
    :returns: A table with ``requires`` and ``build-backend`` guaranteed present
    :raises BuildSystemTableValidationError: on malformed or unknown fields
    """
    # If pyproject.toml is missing (per PEP 517) or [build-system] is missing
    # (per PEP 518), use default values
    if 'build-system' not in pyproject_toml:
        _find_typo(pyproject_toml, 'build-system')
        return _DEFAULT_BACKEND

    build_system_table = dict(pyproject_toml['build-system'])

    # If [build-system] is present, it must have a ``requires`` field (per PEP 518)
    if 'requires' not in build_system_table:
        _find_typo(build_system_table, 'requires')
        msg = '`requires` is a required property'
        raise BuildSystemTableValidationError(msg)
    elif not isinstance(build_system_table['requires'], list) or not all(
        isinstance(i, str) for i in build_system_table['requires']
    ):
        msg = '`requires` must be an array of strings'
        raise BuildSystemTableValidationError(msg)

    if 'build-backend' not in build_system_table:
        _find_typo(build_system_table, 'build-backend')
        # If ``build-backend`` is missing, inject the legacy setuptools backend
        # but leave ``requires`` intact to emulate pip
        build_system_table['build-backend'] = _DEFAULT_BACKEND['build-backend']
    elif not isinstance(build_system_table['build-backend'], str):
        msg = '`build-backend` must be a string'
        raise BuildSystemTableValidationError(msg)

    if 'backend-path' in build_system_table and (
        not isinstance(build_system_table['backend-path'], list)
        or not all(isinstance(i, str) for i in build_system_table['backend-path'])
    ):
        msg = '`backend-path` must be an array of strings'
        raise BuildSystemTableValidationError(msg)

    # Reject any field not defined by PEP 517/518.
    unknown_props = build_system_table.keys() - {'requires', 'build-backend', 'backend-path'}
    if unknown_props:
        msg = f'Unknown properties: {", ".join(unknown_props)}'
        raise BuildSystemTableValidationError(msg)

    return build_system_table
|
||||
|
||||
|
||||
def _wrap_subprocess_runner(runner: SubprocessRunner, env: env.IsolatedEnv) -> SubprocessRunner:
|
||||
def _invoke_wrapped_runner(
|
||||
cmd: Sequence[str], cwd: str | None = None, extra_environ: Mapping[str, str] | None = None
|
||||
) -> None:
|
||||
runner(cmd, cwd, {**(env.make_extra_environ() or {}), **(extra_environ or {})})
|
||||
|
||||
return _invoke_wrapped_runner
|
||||
|
||||
|
||||
class ProjectBuilder:
    """
    The PEP 517 consumer API.
    """

    def __init__(
        self,
        source_dir: StrPath,
        python_executable: str = sys.executable,
        runner: SubprocessRunner = pyproject_hooks.default_subprocess_runner,
    ) -> None:
        """
        :param source_dir: The source directory
        :param python_executable: The python executable where the backend lives
        :param runner: Runner for backend subprocesses

        The ``runner``, if provided, must accept the following arguments:

        - ``cmd``: a list of strings representing the command and arguments to
          execute, as would be passed to e.g. 'subprocess.check_call'.
        - ``cwd``: a string representing the working directory that must be
          used for the subprocess. Corresponds to the provided source_dir.
        - ``extra_environ``: a dict mapping environment variable names to values
          which must be set for the subprocess execution.

        The default runner simply calls the backend hooks in a subprocess, writing backend output
        to stdout/stderr.
        """
        self._source_dir: str = os.path.abspath(source_dir)
        _validate_source_directory(source_dir)

        self._python_executable = python_executable
        self._runner = runner

        # A missing or partial pyproject.toml resolves to the legacy
        # setuptools backend via _parse_build_system_table.
        pyproject_toml_path = os.path.join(source_dir, 'pyproject.toml')
        self._build_system = _parse_build_system_table(_read_pyproject_toml(pyproject_toml_path))

        self._backend = self._build_system['build-backend']

        self._hook = pyproject_hooks.BuildBackendHookCaller(
            self._source_dir,
            self._backend,
            backend_path=self._build_system.get('backend-path'),
            python_executable=self._python_executable,
            runner=self._runner,
        )

    @classmethod
    def from_isolated_env(
        cls: type[_TProjectBuilder],
        env: env.IsolatedEnv,
        source_dir: StrPath,
        runner: SubprocessRunner = pyproject_hooks.default_subprocess_runner,
    ) -> _TProjectBuilder:
        """
        Create a builder that invokes the backend with *env*'s interpreter,
        wrapping *runner* so the environment's extra variables are injected.
        """
        return cls(
            source_dir=source_dir,
            python_executable=env.python_executable,
            runner=_wrap_subprocess_runner(runner, env),
        )

    @property
    def source_dir(self) -> str:
        """Project source directory."""
        return self._source_dir

    @property
    def python_executable(self) -> str:
        """
        The Python executable used to invoke the backend.
        """
        return self._python_executable

    @property
    def build_system_requires(self) -> set[str]:
        """
        The dependencies defined in the ``pyproject.toml``'s
        ``build-system.requires`` field or the default build dependencies
        if ``pyproject.toml`` is missing or ``build-system`` is undefined.
        """
        return set(self._build_system['requires'])

    def get_requires_for_build(
        self,
        distribution: Distribution,
        config_settings: ConfigSettings | None = None,
    ) -> set[str]:
        """
        Return the dependencies defined by the backend in addition to
        :attr:`build_system_requires` for a given distribution.

        :param distribution: Distribution to get the dependencies of
            (``sdist`` or ``wheel``)
        :param config_settings: Config settings for the build backend
        """
        _ctx.log(f'Getting build dependencies for {distribution}...')
        hook_name = f'get_requires_for_build_{distribution}'
        get_requires = getattr(self._hook, hook_name)

        with self._handle_backend(hook_name):
            return set(get_requires(config_settings))

    def check_dependencies(
        self,
        distribution: Distribution,
        config_settings: ConfigSettings | None = None,
    ) -> set[tuple[str, ...]]:
        """
        Return the dependencies which are not satisfied from the combined set of
        :attr:`build_system_requires` and :meth:`get_requires_for_build` for a given
        distribution.

        :param distribution: Distribution to check (``sdist`` or ``wheel``)
        :param config_settings: Config settings for the build backend
        :returns: Set of variable-length unmet dependency tuples
        """
        dependencies = self.get_requires_for_build(distribution, config_settings).union(self.build_system_requires)
        return {u for d in dependencies for u in check_dependency(d)}

    def prepare(
        self,
        distribution: Distribution,
        output_directory: StrPath,
        config_settings: ConfigSettings | None = None,
    ) -> str | None:
        """
        Prepare metadata for a distribution.

        :param distribution: Distribution to build (must be ``wheel``)
        :param output_directory: Directory to put the prepared metadata in
        :param config_settings: Config settings for the build backend
        :returns: The full path to the prepared metadata directory, or ``None``
            if the backend does not implement the hook
        """
        _ctx.log(f'Getting metadata for {distribution}...')
        try:
            return self._call_backend(
                f'prepare_metadata_for_build_{distribution}',
                output_directory,
                config_settings,
                _allow_fallback=False,
            )
        except BuildBackendException as exception:
            # A missing hook is not an error here: signal "unsupported" with None.
            if isinstance(exception.exception, pyproject_hooks.HookMissing):
                return None
            raise

    def build(
        self,
        distribution: Distribution,
        output_directory: StrPath,
        config_settings: ConfigSettings | None = None,
        metadata_directory: str | None = None,
    ) -> str:
        """
        Build a distribution.

        :param distribution: Distribution to build (``sdist`` or ``wheel``)
        :param output_directory: Directory to put the built distribution in
        :param config_settings: Config settings for the build backend
        :param metadata_directory: If provided, should be the return value of a
            previous ``prepare`` call on the same ``distribution`` kind
        :returns: The full path to the built distribution
        """
        _ctx.log(f'Building {distribution}...')
        kwargs = {} if metadata_directory is None else {'metadata_directory': metadata_directory}
        return self._call_backend(f'build_{distribution}', output_directory, config_settings, **kwargs)

    def metadata_path(self, output_directory: StrPath) -> str:
        """
        Generate the metadata directory of a distribution and return its path.

        If the backend does not support the ``prepare_metadata_for_build_wheel``
        hook, a wheel will be built and the metadata will be extracted from it.

        :param output_directory: Directory to put the metadata distribution in
        :returns: The path of the metadata directory
        """
        # prepare_metadata hook
        metadata = self.prepare('wheel', output_directory)
        if metadata is not None:
            return metadata

        # fallback to build_wheel hook
        wheel = self.build('wheel', output_directory)
        match = parse_wheel_filename(os.path.basename(wheel))
        if not match:
            msg = 'Invalid wheel'
            raise ValueError(msg)
        distinfo = f'{match["distribution"]}-{match["version"]}.dist-info'
        member_prefix = f'{distinfo}/'
        with zipfile.ZipFile(wheel) as w:
            # Extract only the .dist-info members from the built wheel.
            w.extractall(
                output_directory,
                (member for member in w.namelist() if member.startswith(member_prefix)),
            )
        return os.path.join(output_directory, distinfo)

    def _call_backend(
        self, hook_name: str, outdir: StrPath, config_settings: ConfigSettings | None = None, **kwargs: Any
    ) -> str:
        """Invoke a backend hook, creating *outdir* if needed, and return the artifact path."""
        outdir = os.path.abspath(outdir)

        callback = getattr(self._hook, hook_name)

        if os.path.exists(outdir):
            if not os.path.isdir(outdir):
                msg = f"Build path '{outdir}' exists and is not a directory"
                raise BuildException(msg)
        else:
            os.makedirs(outdir)

        with self._handle_backend(hook_name):
            # Hooks return the artifact basename relative to outdir.
            basename: str = callback(outdir, config_settings, **kwargs)

        return os.path.join(outdir, basename)

    @contextlib.contextmanager
    def _handle_backend(self, hook: str) -> Iterator[None]:
        """Translate hook/backend failures into :class:`BuildBackendException`."""
        try:
            yield
        except pyproject_hooks.BackendUnavailable as exception:
            raise BuildBackendException(
                exception,
                f"Backend '{self._backend}' is not available.",
                sys.exc_info(),
            ) from None
        except subprocess.CalledProcessError as exception:
            raise BuildBackendException(exception, f'Backend subprocess exited when trying to invoke {hook}') from None
        except Exception as exception:
            raise BuildBackendException(exception, exc_info=sys.exc_info()) from None
|
||||
@@ -0,0 +1,22 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import typing
|
||||
|
||||
|
||||
# Select a ``metadata`` implementation. Type checkers always see the
# importlib_metadata backport; at runtime the stdlib module is used on
# Python 3.10.2+, with the backport preferred on older versions.
if typing.TYPE_CHECKING:
    import importlib_metadata as metadata
else:
    if sys.version_info >= (3, 10, 2):
        from importlib import metadata
    else:
        try:
            import importlib_metadata as metadata
        except ModuleNotFoundError:
            # helps bootstrapping when dependencies aren't installed
            from importlib import metadata


__all__ = [
    'metadata',
]
|
||||
@@ -0,0 +1,31 @@
|
||||
"""Compatibility shim providing a ``TarFile`` with safe extraction defaults."""

from __future__ import annotations

import sys
import tarfile
import typing


if typing.TYPE_CHECKING:
    TarFile = tarfile.TarFile

else:
    # Per https://peps.python.org/pep-0706/, the "data" filter will become
    # the default in Python 3.14. The first series of releases with the filter
    # had a broken filter that could not process symlinks correctly.
    _data_filter_is_usable = (
        (3, 9, 18) <= sys.version_info < (3, 10)
        or (3, 10, 13) <= sys.version_info < (3, 11)
        or (3, 11, 5) <= sys.version_info < (3, 12)
        or (3, 12) <= sys.version_info < (3, 14)
    )

    if _data_filter_is_usable:

        class TarFile(tarfile.TarFile):
            # Opt in to the PEP 706 "data" extraction behaviour ahead of 3.14.
            extraction_filter = staticmethod(tarfile.data_filter)

    else:
        TarFile = tarfile.TarFile


__all__ = [
    'TarFile',
]
|
||||
@@ -0,0 +1,16 @@
|
||||
"""Compatibility shim for TOML parsing across Python versions."""

from __future__ import annotations

import sys


# ``tomllib`` joined the standard library in Python 3.11; older interpreters
# use the API-compatible ``tomli`` backport instead.
if sys.version_info >= (3, 11):
    from tomllib import TOMLDecodeError, load, loads
else:
    from tomli import TOMLDecodeError, load, loads


__all__ = [
    'TOMLDecodeError',
    'load',
    'loads',
]
|
||||
@@ -0,0 +1,99 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import contextvars
|
||||
import logging
|
||||
import subprocess
|
||||
import typing
|
||||
|
||||
from collections.abc import Mapping, Sequence
|
||||
from functools import partial
|
||||
|
||||
from ._types import StrPath
|
||||
|
||||
|
||||
class _Logger(typing.Protocol):  # pragma: no cover
    """Structural type of a build logger callable."""

    def __call__(self, message: str, *, origin: tuple[str, ...] | None = None) -> None: ...


# Log under this package's name so applications can filter its output.
_package_name = __spec__.parent
_default_logger = logging.getLogger(_package_name)


def _log_default(message: str, *, origin: tuple[str, ...] | None = None) -> None:
    """Default logger: forward untagged messages to :mod:`logging` at INFO level.

    Messages carrying an ``origin`` (e.g. captured subprocess output) are
    ignored here; a richer front-end may install a logger that renders them.
    """
    if origin is not None:
        return
    _default_logger.log(logging.INFO, message, stacklevel=2)


# Context variables, so concurrent users can install their own logger/verbosity.
LOGGER = contextvars.ContextVar('LOGGER', default=_log_default)
VERBOSITY = contextvars.ContextVar('VERBOSITY', default=0)
|
||||
|
||||
|
||||
def log_subprocess_error(error: subprocess.CalledProcessError) -> None:
    """Log the command line and any captured output of a failed subprocess.

    :param error: The failure whose ``cmd``/``stdout``/``stderr`` to report
    """
    log = LOGGER.get()

    log(subprocess.list2cmdline(error.cmd), origin=('subprocess', 'cmd'))

    # Emit whichever output streams were captured, tagging each by name.
    for stream_name in ('stdout', 'stderr'):
        stream = getattr(error, stream_name)
        if stream:
            text = stream.decode() if isinstance(stream, bytes) else stream
            log(text, origin=('subprocess', stream_name))
|
||||
|
||||
|
||||
def run_subprocess(cmd: Sequence[StrPath], env: Mapping[str, str] | None = None) -> None:
    """Run *cmd* in a subprocess, routing its output through the context logger.

    When the context verbosity is non-zero the output is streamed live
    (stdout and stderr tagged separately); otherwise output is captured
    and only logged if the command fails.

    :param cmd: Command line to execute
    :param env: Environment for the subprocess (inherits the parent's if ``None``)
    :raises subprocess.CalledProcessError: If the command exits non-zero
    """
    verbosity = VERBOSITY.get()

    if verbosity:
        import concurrent.futures

        log = LOGGER.get()

        def log_stream(stream_name: str, stream: typing.IO[str]) -> None:
            # Forward each line as it arrives, tagged with its stream name.
            for line in stream:
                log(line, origin=('subprocess', stream_name))

        with (
            concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor,
            subprocess.Popen(cmd, encoding='utf-8', env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process,
        ):
            log(subprocess.list2cmdline(cmd), origin=('subprocess', 'cmd'))

            # Logging in sub-thread to more-or-less ensure order of stdout and stderr whilst also
            # being able to distinguish between the two.
            concurrent.futures.wait(
                [executor.submit(partial(log_stream, n, getattr(process, n))) for n in ('stdout', 'stderr')]
            )

        code = process.wait()
        if code:  # pragma: no cover
            raise subprocess.CalledProcessError(code, process.args)

    else:
        try:
            subprocess.run(cmd, capture_output=True, check=True, env=env)
        except subprocess.CalledProcessError as error:
            # Surface the captured output before propagating the failure.
            log_subprocess_error(error)
            raise
|
||||
|
||||
|
||||
if typing.TYPE_CHECKING:
    # Static view of the dynamic module attributes resolved in __getattr__ below.
    log: _Logger
    verbosity: bool
else:

    def __getattr__(name):
        # Resolve ``_ctx.log`` / ``_ctx.verbosity`` lazily so they always
        # reflect the values currently held by the context variables.
        if name == 'log':
            return LOGGER.get()
        if name == 'verbosity':
            return VERBOSITY.get()
        raise AttributeError(name)  # pragma: no cover


__all__ = [
    'LOGGER',
    'VERBOSITY',
    'log',
    'log_subprocess_error',
    'run_subprocess',
    'verbosity',
]
|
||||
@@ -0,0 +1,65 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
import types
|
||||
|
||||
|
||||
class BuildException(Exception):
    """Error raised by :class:`build.ProjectBuilder` for project-level build failures."""
|
||||
|
||||
|
||||
class BuildBackendException(Exception):
    """
    Exception raised when a backend operation fails.

    Wraps the underlying exception (and optionally its traceback) so front-ends
    can report the original failure.
    """

    def __init__(
        self,
        exception: Exception,
        description: str | None = None,
        exc_info: tuple[type[BaseException], BaseException, types.TracebackType] | tuple[None, None, None] = (
            None,
            None,
            None,
        ),
    ) -> None:
        super().__init__()
        # Keep the original failure and traceback for rich error reporting.
        self.exception = exception
        self.exc_info = exc_info
        self._description = description

    def __str__(self) -> str:
        # Prefer the caller-supplied description over the generic summary.
        return self._description or f'Backend operation failed: {self.exception!r}'
|
||||
|
||||
|
||||
class BuildSystemTableValidationError(BuildException):
    """
    Exception raised when the ``[build-system]`` table in pyproject.toml is invalid.
    """

    def __str__(self) -> str:
        # ``args[0]`` carries the validation detail supplied at raise time.
        return f'Failed to validate `build-system` in pyproject.toml: {self.args[0]}'
|
||||
|
||||
|
||||
class FailedProcessError(Exception):
    """
    Exception raised when a setup or preparation operation fails.
    """

    def __init__(self, exception: subprocess.CalledProcessError, description: str) -> None:
        super().__init__()
        # Keep the original subprocess failure for callers that need details.
        self.exception = exception
        self._description = description

    def __str__(self) -> str:
        return self._description
|
||||
|
||||
|
||||
class TypoWarning(Warning):
    """Warning emitted when a probable typo is detected."""
|
||||
@@ -0,0 +1,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import typing
|
||||
|
||||
|
||||
__all__ = ['ConfigSettings', 'Distribution', 'StrPath', 'SubprocessRunner']


# Mapping of config-setting names to a string or a sequence of strings —
# the shape passed through to backend hooks as ``config_settings``.
ConfigSettings = typing.Mapping[str, typing.Union[str, typing.Sequence[str]]]
# The artifact kinds this package knows how to build.
Distribution = typing.Literal['sdist', 'wheel', 'editable']

# A filesystem path: either ``str`` or an ``os.PathLike`` yielding ``str``.
StrPath = typing.Union[str, os.PathLike[str]]

if typing.TYPE_CHECKING:
    from pyproject_hooks import SubprocessRunner
else:
    # Runtime stand-in matching the callable shape of pyproject_hooks runners.
    SubprocessRunner = typing.Callable[
        [typing.Sequence[str], typing.Optional[str], typing.Optional[typing.Mapping[str, str]]], None
    ]
|
||||
@@ -0,0 +1,63 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
from collections.abc import Iterator, Set
|
||||
|
||||
|
||||
# Decomposes a wheel filename into its tagged components:
# {distribution}-{version}(-{build_tag})?-{python_tag}-{abi_tag}-{platform_tag}.whl
_WHEEL_FILENAME_REGEX = re.compile(
    r'(?P<distribution>.+)-(?P<version>.+)'
    r'(-(?P<build_tag>.+))?-(?P<python_tag>.+)'
    r'-(?P<abi_tag>.+)-(?P<platform_tag>.+)\.whl'
)
|
||||
|
||||
|
||||
def check_dependency(
    req_string: str, ancestral_req_strings: tuple[str, ...] = (), parent_extras: Set[str] = frozenset()
) -> Iterator[tuple[str, ...]]:
    """
    Verify that a dependency and all of its dependencies are met.

    :param req_string: Requirement string
    :param ancestral_req_strings: Chain of requirement strings that led to
        this dependency (used for cycle detection and reporting)
    :param parent_extras: Extras (eg. "test" in myproject[test])
    :yields: Unmet dependencies, each as the chain of requirement strings
        leading to the unmet one
    """
    # Imported lazily to keep module import light.
    import packaging.requirements

    from ._compat import importlib

    req = packaging.requirements.Requirement(req_string)
    normalised_req_string = str(req)

    # ``Requirement`` doesn't implement ``__eq__`` so we cannot compare reqs for
    # equality directly but the string representation is stable.
    if normalised_req_string in ancestral_req_strings:
        # cyclical dependency, already checked.
        return

    if req.marker:
        # The empty extra covers the "no extra requested" case.
        extras = frozenset(('',)).union(parent_extras)
        # a requirement can have multiple extras but ``evaluate`` can
        # only check one at a time.
        if all(not req.marker.evaluate(environment={'extra': e}) for e in extras):
            # if the marker conditions are not met, we pretend that the
            # dependency is satisfied.
            return

    try:
        dist = importlib.metadata.distribution(req.name)
    except importlib.metadata.PackageNotFoundError:
        # dependency is not installed in the environment.
        yield (*ancestral_req_strings, normalised_req_string)
    else:
        if req.specifier and not req.specifier.contains(dist.version, prereleases=True):
            # the installed version is incompatible.
            yield (*ancestral_req_strings, normalised_req_string)
        elif dist.requires:
            for other_req_string in dist.requires:
                # yields transitive dependencies that are not satisfied.
                yield from check_dependency(other_req_string, (*ancestral_req_strings, normalised_req_string), req.extras)
|
||||
|
||||
|
||||
def parse_wheel_filename(filename: str) -> re.Match[str] | None:
    """Match *filename* against the wheel naming pattern.

    :param filename: Wheel filename to parse
    :return: The match object (with named groups for each component),
        or ``None`` when the name does not conform
    """
    return _WHEEL_FILENAME_REGEX.match(filename)
|
||||
393
backend_service/venv/lib/python3.13/site-packages/build/env.py
Normal file
393
backend_service/venv/lib/python3.13/site-packages/build/env.py
Normal file
@@ -0,0 +1,393 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import abc
|
||||
import functools
|
||||
import importlib.util
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import sysconfig
|
||||
import tempfile
|
||||
import typing
|
||||
import warnings
|
||||
|
||||
from collections.abc import Collection, Mapping
|
||||
|
||||
from . import _ctx
|
||||
from ._ctx import run_subprocess
|
||||
from ._exceptions import FailedProcessError
|
||||
from ._util import check_dependency
|
||||
|
||||
|
||||
# The installer backends an isolated environment can be provisioned with.
Installer = typing.Literal['pip', 'uv']

# Tuple of the literal's values: ('pip', 'uv').
INSTALLERS = typing.get_args(Installer)
|
||||
|
||||
|
||||
# Structural protocol: any object providing these members is an isolated env.
class IsolatedEnv(typing.Protocol):
    """Isolated build environment ABC."""

    @property
    @abc.abstractmethod
    def python_executable(self) -> str:
        """The Python executable of the isolated environment."""

    @abc.abstractmethod
    def make_extra_environ(self) -> Mapping[str, str] | None:
        """Generate additional env vars specific to the isolated environment."""
|
||||
|
||||
|
||||
def _has_dependency(name: str, minimum_version_str: str | None = None, /, **distargs: object) -> bool | None:
    """
    Check whether distribution *name* is present (optionally at a minimum version).

    Returns ``True`` when present and recent enough, ``False`` when present but
    too old, and ``None`` when no matching distribution can be found.
    """
    from packaging.version import Version

    from ._compat import importlib

    # Take the first matching distribution, if any.
    candidates = importlib.metadata.distributions(name=name, **distargs)
    distribution = next(iter(candidates), None)
    if distribution is None:
        return None

    if minimum_version_str is None:
        return True

    return Version(distribution.version) >= Version(minimum_version_str)
|
||||
|
||||
|
||||
class DefaultIsolatedEnv(IsolatedEnv):
    """
    Isolated environment which supports several different underlying implementations.
    """

    def __init__(
        self,
        *,
        installer: Installer = 'pip',
    ) -> None:
        # Which installer backend will provision the environment ('pip' or 'uv').
        self.installer: Installer = installer

    def __enter__(self) -> DefaultIsolatedEnv:
        """Create the temporary environment; tear it down again if creation fails."""
        try:
            path = tempfile.mkdtemp(prefix='build-env-')
            # Call ``realpath`` to prevent spurious warning from being emitted
            # that the venv location has changed on Windows for the venv impl.
            # The username is DOS-encoded in the output of tempfile - the location is the same
            # but the representation of it is different, which confuses venv.
            # Ref: https://bugs.python.org/issue46171
            path = os.path.realpath(path)
            self._path = path

            self._env_backend: _EnvBackend

            # uv is opt-in only.
            if self.installer == 'uv':
                self._env_backend = _UvBackend()
            else:
                self._env_backend = _PipBackend()

            _ctx.log(f'Creating isolated environment: {self._env_backend.display_name}...')
            self._env_backend.create(self._path)

        except Exception:  # cleanup folder if creation fails
            self.__exit__(*sys.exc_info())
            raise

        return self

    def __exit__(self, *args: object) -> None:
        """Remove the environment directory, if it still exists."""
        if os.path.exists(self._path):  # in case the user already deleted skip remove
            shutil.rmtree(self._path)

    @property
    def path(self) -> str:
        """The location of the isolated build environment."""
        return self._path

    @property
    def python_executable(self) -> str:
        """The python executable of the isolated build environment."""
        return self._env_backend.python_executable

    def make_extra_environ(self) -> dict[str, str]:
        """Return a PATH override putting the env's scripts directory first."""
        path = os.environ.get('PATH')
        return {
            'PATH': os.pathsep.join([self._env_backend.scripts_dir, path])
            if path is not None
            else self._env_backend.scripts_dir
        }

    def install(self, requirements: Collection[str]) -> None:
        """
        Install packages from PEP 508 requirements in the isolated build environment.

        :param requirements: PEP 508 requirement specification to install

        :note: Passing non-PEP 508 strings will result in undefined behavior, you *should not* rely on it. It is
               merely an implementation detail, it may change any time without warning.
        """
        if not requirements:
            return

        _ctx.log('Installing packages in isolated environment:\n' + '\n'.join(f'- {r}' for r in sorted(requirements)))
        self._env_backend.install_requirements(requirements)
|
||||
|
||||
|
||||
class _EnvBackend(typing.Protocol):  # pragma: no cover
    """Interface implemented by the environment-provisioning backends below."""

    # Set by ``create``: interpreter and scripts locations inside the env.
    python_executable: str
    scripts_dir: str

    def create(self, path: str) -> None: ...

    def install_requirements(self, requirements: Collection[str]) -> None: ...

    @property
    def display_name(self) -> str: ...
|
||||
|
||||
|
||||
class _PipBackend(_EnvBackend):
    """Provision the environment with ``venv``/``virtualenv`` and install with pip."""

    def __init__(self) -> None:
        # Use virtualenv only when the outer pip cannot drive the env directly.
        self._create_with_virtualenv = not self._has_valid_outer_pip and self._has_virtualenv

    @functools.cached_property
    def _has_valid_outer_pip(self) -> bool | None:
        """
        This checks for a valid global pip. Returns None if pip is missing, False
        if pip is too old, and True if it can be used.

        (In practice a missing pip also yields ``False`` here, because
        ``_has_dependency`` returning ``None`` is falsy in the check below.)
        """

        # Version to have added the `--python` option.
        if not _has_dependency('pip', '22.3'):  # pragma: no cover
            return False

        # `pip install --python` is nonfunctional on Gentoo debundled pip.
        # Detect that by checking if pip._vendor` module exists. However,
        # searching for pip could yield warnings from _distutils_hack,
        # so silence them.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            if importlib.util.find_spec('pip._vendor') is None:
                return False  # pragma: no cover

        return True

    @functools.cached_property
    def _has_virtualenv(self) -> bool:
        """
        virtualenv might be incompatible if it was installed separately
        from build. This verifies that virtualenv and all of its
        dependencies are installed as required by build.
        """
        from packaging.requirements import Requirement

        name = 'virtualenv'

        # Unmet-dependency chains of build[virtualenv] must not include virtualenv itself.
        return importlib.util.find_spec(name) is not None and not any(
            Requirement(d[1]).name == name for d in check_dependency(f'build[{name}]') if len(d) > 1
        )

    @staticmethod
    def _get_minimum_pip_version_str() -> str:
        """Return the oldest pip version usable on the current platform."""
        if platform.system() == 'Darwin':
            release, _, machine = platform.mac_ver()
            if int(release[: release.find('.')]) >= 11:
                # macOS 11+ name scheme change requires 20.3. Intel macOS 11.0 can be
                # told to report 10.16 for backwards compatibility; but that also fixes
                # earlier versions of pip so this is only needed for 11+.
                is_apple_silicon_python = machine != 'x86_64'
                return '21.0.1' if is_apple_silicon_python else '20.3.0'

        # PEP-517 and manylinux1 was first implemented in 19.1
        return '19.1.0'

    def create(self, path: str) -> None:
        """Create the virtual environment at *path* and make sure pip is usable for it."""
        if self._create_with_virtualenv:
            import packaging.version
            import virtualenv

            from ._compat import importlib

            virtualenv_ver = packaging.version.Version(importlib.metadata.version('virtualenv'))

            opts = [
                path,
                '--activators',
                '',
                '--no-setuptools',
                '--no-periodic-update',
            ]

            # Only pass --no-wheel to virtualenv versions that still accept it.
            if virtualenv_ver < packaging.version.Version('20.31.0'):
                opts.append('--no-wheel')

            result = virtualenv.cli_run(opts, setup_logging=False)

            # The creator attributes are `pathlib.Path`s.
            self.python_executable = str(result.creator.exe)
            self.scripts_dir = str(result.creator.script_dir)

        else:
            import venv

            # Seed pip into the env only when the outer pip cannot target it.
            with_pip = not self._has_valid_outer_pip

            try:
                venv.EnvBuilder(symlinks=_fs_supports_symlink(), with_pip=with_pip).create(path)
            except subprocess.CalledProcessError as exc:
                _ctx.log_subprocess_error(exc)
                raise FailedProcessError(exc, 'Failed to create venv. Maybe try installing virtualenv.') from None

            self.python_executable, self.scripts_dir, purelib = _find_executable_and_scripts(path)

            if with_pip:
                # Upgrade the seeded pip if it is older than the platform minimum.
                minimum_pip_version_str = self._get_minimum_pip_version_str()
                if not _has_dependency(
                    'pip',
                    minimum_pip_version_str,
                    path=[purelib],
                ):
                    run_subprocess([self.python_executable, '-Im', 'pip', 'install', f'pip>={minimum_pip_version_str}'])

                # Uninstall setuptools from the build env to prevent depending on it implicitly.
                # Pythons 3.12 and up do not install setuptools, check if it exists first.
                if _has_dependency(
                    'setuptools',
                    path=[purelib],
                ):
                    run_subprocess([self.python_executable, '-Im', 'pip', 'uninstall', '-y', 'setuptools'])

    def install_requirements(self, requirements: Collection[str]) -> None:
        """Install *requirements* (PEP 508 strings) into the environment via pip."""
        # pip does not honour environment markers in command line arguments
        # but it does from requirement files.
        with tempfile.NamedTemporaryFile('w', prefix='build-reqs-', suffix='.txt', delete=False, encoding='utf-8') as req_file:
            req_file.write(os.linesep.join(requirements))

        try:
            if self._has_valid_outer_pip:
                # Drive the env with the outer pip via ``pip --python``.
                cmd = [sys.executable, '-m', 'pip', '--python', self.python_executable]
            else:
                cmd = [self.python_executable, '-Im', 'pip']

            # Translate context verbosity into repeated ``-v`` flags.
            if _ctx.verbosity > 1:
                cmd += [f'-{"v" * (_ctx.verbosity - 1)}']

            cmd += [
                'install',
                '--use-pep517',
                '--no-warn-script-location',
                '--no-compile',
                '-r',
                os.path.abspath(req_file.name),
            ]
            run_subprocess(cmd)

        finally:
            os.unlink(req_file.name)

    @property
    def display_name(self) -> str:
        return 'virtualenv+pip' if self._create_with_virtualenv else 'venv+pip'
|
||||
|
||||
|
||||
class _UvBackend(_EnvBackend):
    """Provision the environment with ``venv`` and install packages with ``uv``."""

    def create(self, path: str) -> None:
        import venv

        self._env_path = path
        self._uv_bin = self._locate_uv()

        venv.EnvBuilder(symlinks=_fs_supports_symlink(), with_pip=False).create(self._env_path)
        self.python_executable, self.scripts_dir, _ = _find_executable_and_scripts(self._env_path)

    def _locate_uv(self) -> str:
        # Prefer the ``uv`` Python package; fall back to a uv executable on PATH.
        try:
            import uv

            return uv.find_uv_bin()
        except (ModuleNotFoundError, FileNotFoundError):
            uv_bin = shutil.which('uv')
            if uv_bin is None:
                msg = 'uv executable not found'
                raise RuntimeError(msg) from None

            _ctx.log(f'Using external uv from {uv_bin}')
            return uv_bin

    def install_requirements(self, requirements: Collection[str]) -> None:
        cmd = [self._uv_bin, 'pip']
        # Translate context verbosity into up to two ``-v`` flags.
        if _ctx.verbosity > 1:
            cmd += [f'-{"v" * min(2, _ctx.verbosity - 1)}']
        run_subprocess([*cmd, 'install', *requirements], env={**os.environ, 'VIRTUAL_ENV': self._env_path})

    @property
    def display_name(self) -> str:
        return 'venv+uv'
|
||||
|
||||
|
||||
@functools.cache
def _fs_supports_symlink() -> bool:
    """Return True if symlinks are supported"""
    # Using definition used by venv.main()
    if os.name != 'nt':
        return True

    # Windows may support symlinks (setting in Windows 10)
    with tempfile.NamedTemporaryFile(prefix='build-symlink-') as tmp_file:
        # BUG FIX: interpolating the file *object* produced a bogus destination
        # like "<tempfile._TemporaryFileWrapper object at 0x...>-b"; use the
        # actual on-disk file name instead.
        dest = f'{tmp_file.name}-b'
        try:
            os.symlink(tmp_file.name, dest)
            os.unlink(dest)
        except (OSError, NotImplementedError, AttributeError):
            return False
        return True
|
||||
|
||||
|
||||
def _find_executable_and_scripts(path: str) -> tuple[str, str, str]:
    """
    Detect the Python executable and script folder of a virtual environment.

    :param path: The location of the virtual environment
    :return: The Python executable, script folder, and purelib folder
    :raises RuntimeError: If the expected interpreter is missing from the environment
    """
    config_vars = sysconfig.get_config_vars().copy()  # globally cached, copy before altering it
    config_vars['base'] = path
    scheme_names = sysconfig.get_scheme_names()

    # Pick an installation scheme whose paths are valid inside a venv.
    if 'venv' in scheme_names:
        # Python distributors with custom default installation scheme can set a
        # scheme that can't be used to expand the paths in a venv.
        # This can happen if build itself is not installed in a venv.
        # The distributors are encouraged to set a "venv" scheme to be used for this.
        # See https://bugs.python.org/issue45413
        # and https://github.com/pypa/virtualenv/issues/2208
        scheme = 'venv'
    elif 'posix_local' in scheme_names:
        # The Python that ships on Debian/Ubuntu varies the default scheme to
        # install to /usr/local, but does not (yet) set a "venv" scheme.
        # "posix_prefix" is venv-compatible there.
        scheme = 'posix_prefix'
    elif 'osx_framework_library' in scheme_names:
        # The Python that ships with the macOS developer tools varies the
        # default scheme depending on whether ``sys.prefix`` is part of a
        # framework, but does not (yet) set a "venv" scheme.
        # "posix_prefix" is venv-compatible there.
        scheme = 'posix_prefix'
    else:
        scheme = None

    paths = sysconfig.get_paths(scheme=scheme, vars=config_vars) if scheme else sysconfig.get_paths(vars=config_vars)

    executable = os.path.join(paths['scripts'], 'python.exe' if os.name == 'nt' else 'python')
    if not os.path.exists(executable):
        msg = f'Virtual environment creation failed, executable {executable} missing'
        raise RuntimeError(msg)

    return executable, paths['scripts'], paths['purelib']
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    'DefaultIsolatedEnv',
    'IsolatedEnv',
]
|
||||
@@ -0,0 +1,63 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pathlib
|
||||
import tempfile
|
||||
|
||||
import pyproject_hooks
|
||||
|
||||
from . import ProjectBuilder
|
||||
from ._compat import importlib
|
||||
from ._types import StrPath, SubprocessRunner
|
||||
from .env import DefaultIsolatedEnv
|
||||
|
||||
|
||||
def _project_wheel_metadata(builder: ProjectBuilder) -> importlib.metadata.PackageMetadata:
    """Build the project's ``.dist-info`` in a temporary directory and return its metadata."""
    with tempfile.TemporaryDirectory() as tmpdir:
        distinfo = pathlib.Path(builder.metadata_path(tmpdir))
        metadata = importlib.metadata.PathDistribution(distinfo).metadata
        # ``PathDistribution.metadata`` is typed as optional; a freshly
        # generated dist-info is expected to always yield metadata.
        assert metadata is not None
        return metadata
|
||||
|
||||
|
||||
def project_wheel_metadata(
    source_dir: StrPath,
    isolated: bool = True,
    *,
    runner: SubprocessRunner = pyproject_hooks.quiet_subprocess_runner,
) -> importlib.metadata.PackageMetadata:
    """
    Return the wheel metadata for a project.

    Uses the ``prepare_metadata_for_build_wheel`` hook if available,
    otherwise ``build_wheel``.

    :param source_dir: Project source directory
    :param isolated: Whether or not to run invoke the backend in the current
                     environment or to create an isolated one and invoke it
                     there.
    :param runner: An alternative runner for backend subprocesses
    """

    if not isolated:
        return _project_wheel_metadata(
            ProjectBuilder(
                source_dir,
                runner=runner,
            )
        )

    with DefaultIsolatedEnv() as env:
        builder = ProjectBuilder.from_isolated_env(
            env,
            source_dir,
            runner=runner,
        )
        # Both the build backend and its dynamic build requirements must be
        # present before the metadata hooks can run.
        env.install(builder.build_system_requires)
        env.install(builder.get_requires_for_build('wheel'))
        return _project_wheel_metadata(builder)
|
||||
|
||||
|
||||
# Public API of this module.
__all__ = [
    'project_wheel_metadata',
]
|
||||
Reference in New Issue
Block a user