import glob
import io
import os
import sys
import importlib.metadata
import argparse
import traceback
import json
import subprocess
import re
import tempfile
import email.parser
import pathlib
import zipfile

from pyproject_requirements_txt import convert_requirements_txt
from pyproject_wheel import parse_config_settings_args


# Some valid Python version specifiers are not supported.
# Allow only the forms we know we can handle.
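# For illustration (not exhaustive): forms like '1.2.3', '2.0rc1' or '1.2.*'
# pass this check, while e.g. local versions ('1.0+local') or epochs ('1!2.0')
# are reported as an error in Requirements.add().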
VERSION_RE = re.compile(r'[a-zA-Z0-9.-]+(\.\*)?')


class EndPass(Exception):
    """End current pass of generating requirements"""


# nb: we don't use functools.partial to be able to use pytest's capsys
# see https://github.com/pytest-dev/pytest/issues/8900
def print_err(*args, **kwargs):
    kwargs.setdefault('file', sys.stderr)
    print(*args, **kwargs)


try:
    from packaging.requirements import Requirement, InvalidRequirement
    from packaging.utils import canonicalize_name
except ImportError as e:
    print_err('Import error:', e)
    # already echoed by the %pyproject_buildrequires macro
    sys.exit(0)

# uses packaging, needs to be imported after packaging is verified to be present
from pyproject_convert import convert


def guess_reason_for_invalid_requirement(requirement_str):
    if ':' in requirement_str:
        message = (
            'It might be a URL. '
            '%pyproject_buildrequires cannot handle all URL-based requirements. '
            'Add PackageName@ (see PEP 508) to the URL to at least require any version of PackageName.'
        )
        if '@' in requirement_str:
            message += ' (but note that URLs might not work well with other features)'
        return message
    if '/' in requirement_str:
        return (
            'It might be a local path. '
            '%pyproject_buildrequires cannot handle local paths as requirements. '
            'Use a URL with PackageName@ (see PEP 508) to at least require any version of PackageName.'
        )
    # No more ideas
    return None


class Requirements:
    """Requirement gatherer. The macro will eventually print out output_lines."""
    def __init__(self, get_installed_version, extras=None,
                 generate_extras=False, python3_pkgversion='3', config_settings=None):
        self.get_installed_version = get_installed_version
        self.output_lines = []
        self.extras = set()

        if extras:
            for extra in extras:
                self.add_extras(*extra.split(','))

        self.missing_requirements = False
        self.ignored_alien_requirements = []

        self.generate_extras = generate_extras
        self.python3_pkgversion = python3_pkgversion
        self.config_settings = config_settings

    def add_extras(self, *extras):
        self.extras |= set(e.strip() for e in extras)

    @property
    def marker_envs(self):
        if self.extras:
            return [{'extra': e} for e in sorted(self.extras)]
        return [{'extra': ''}]
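
    # Evaluate the requirement's environment marker once per requested extra
    # (or once with an empty extra), so e.g. a dependency guarded by
    # `extra == "tests"` is kept only when the 'tests' extra was asked for.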
    def evaluate_all_environments(self, requirement):
        for marker_env in self.marker_envs:
            if requirement.marker.evaluate(environment=marker_env):
                return True
        return False

    def add(self, requirement_str, *, package_name=None, source=None):
        """Output a Python-style requirement string as RPM dep"""
        print_err(f'Handling {requirement_str} from {source}')

        try:
            requirement = Requirement(requirement_str)
        except InvalidRequirement:
            hint = guess_reason_for_invalid_requirement(requirement_str)
            message = f'Requirement {requirement_str!r} from {source} is invalid.'
            if hint:
                message += f' Hint: {hint}'
            raise ValueError(message)

        if requirement.url:
            print_err(
                f'WARNING: Simplifying {requirement_str!r} to {requirement.name!r}.'
            )

        name = canonicalize_name(requirement.name)
        if (requirement.marker is not None and
                not self.evaluate_all_environments(requirement)):
            print_err('Ignoring alien requirement:', requirement_str)
            self.ignored_alien_requirements.append(requirement_str)
            return

        # Handle self-referencing requirements
        if package_name and canonicalize_name(package_name) == name:
            # Self-referential extras need to be handled specially
            if requirement.extras:
                if not (requirement.extras <= self.extras):  # only handle it if needed
                    # let all further requirements know we want those extras
                    self.add_extras(*requirement.extras)
                    # re-add all of the alien requirements ignored in the past
                    # they might no longer be alien now
                    self.readd_ignored_alien_requirements(package_name=package_name)
            else:
                print_err('Ignoring self-referential requirement without extras:', requirement_str)
            return

        # We need to always accept pre-releases as satisfying the requirement
        # Otherwise e.g. installed cffi version 1.15.0rc2 won't even satisfy the requirement for "cffi"
        # https://bugzilla.redhat.com/show_bug.cgi?id=2014639#c3
        requirement.specifier.prereleases = True

        try:
            # TODO: check if requirements with extras are satisfied
            installed = self.get_installed_version(requirement.name)
        except importlib.metadata.PackageNotFoundError:
            print_err(f'Requirement not satisfied: {requirement_str}')
            installed = None
        if installed and installed in requirement.specifier:
            print_err(f'Requirement satisfied: {requirement_str}')
            print_err(f'   (installed: {requirement.name} {installed})')
            if requirement.extras:
                print_err('   (extras are currently not checked)')
        else:
            self.missing_requirements = True
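
        # Convert the remaining requirement into RPM dependency strings, e.g.
        # python3dist(foo) >= 1.2; multiple specifiers are combined into a rich
        # dependency such as (python3dist(foo) >= 1.2 with python3dist(foo) < 2).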
        if self.generate_extras:
            extra_names = [f'{name}[{extra.lower()}]' for extra in sorted(requirement.extras)]
        else:
            extra_names = []

        for name in [name] + extra_names:
            together = []
            for specifier in sorted(
                requirement.specifier,
                key=lambda s: (s.operator, s.version),
            ):
                if not VERSION_RE.fullmatch(str(specifier.version)):
                    raise ValueError(
                        f'Unknown character in version: {specifier.version}. '
                        + '(This might be a bug in pyproject-rpm-macros.)',
                    )
                together.append(convert(python3dist(name, python3_pkgversion=self.python3_pkgversion),
                                        specifier.operator, specifier.version))
            if len(together) == 0:
                dep = python3dist(name, python3_pkgversion=self.python3_pkgversion)
                self.output_lines.append(dep)
            elif len(together) == 1:
                self.output_lines.append(together[0])
            else:
                self.output_lines.append(f"({' with '.join(together)})")

    def check(self, *, source=None):
        """End current pass if any unsatisfied dependencies were output"""
        if self.missing_requirements:
            print_err(f'Exiting dependency generation pass: {source}')
            raise EndPass(source)

    def extend(self, requirement_strs, **kwargs):
        """add() several requirements"""
        for req_str in requirement_strs:
            self.add(req_str, **kwargs)

    def readd_ignored_alien_requirements(self, **kwargs):
        """add() previously ignored alien requirements again."""
        requirements, self.ignored_alien_requirements = self.ignored_alien_requirements, []
        kwargs.setdefault('source', 'Previously ignored alien requirements')
        self.extend(requirements, **kwargs)


def toml_load(opened_binary_file):
    try:
        # tomllib is in the standard library since 3.11.0b1
        import tomllib
    except ImportError:
        try:
            import tomli as tomllib
        except ImportError as e:
            print_err('Import error:', e)
            # already echoed by the %pyproject_buildrequires macro
            sys.exit(0)
    return tomllib.load(opened_binary_file)


def get_backend(requirements):
    try:
        f = open('pyproject.toml', 'rb')
    except FileNotFoundError:
        pyproject_data = {}
    else:
        with f:
            pyproject_data = toml_load(f)

    buildsystem_data = pyproject_data.get('build-system', {})
    requirements.extend(
        buildsystem_data.get('requires', ()),
        source='build-system.requires',
    )

    backend_name = buildsystem_data.get('build-backend')
    if not backend_name:
        # https://www.python.org/dev/peps/pep-0517/:
        # If the pyproject.toml file is absent, or the build-backend key is
        # missing, the source tree is not using this specification, and tools
        # should revert to the legacy behaviour of running setup.py
        # (either directly, or by implicitly invoking the [following] backend).
        # If setup.py is also not present, the program will mimic pip's behavior
        # and end with an error.
        if not os.path.exists('setup.py'):
            raise FileNotFoundError('File "setup.py" not found for legacy project.')
        backend_name = 'setuptools.build_meta:__legacy__'

        # Note: For projects without pyproject.toml, this was already echoed
        # by the %pyproject_buildrequires macro, but this also handles cases
        # with pyproject.toml without a specified build backend.
        # If the default requirements change, also change them in the macro!
        requirements.add('setuptools >= 40.8', source='default build backend')
        requirements.add('wheel', source='default build backend')

    requirements.check(source='build backend')

    backend_path = buildsystem_data.get('backend-path')
    if backend_path:
        # PEP 517 example shows the path as a list, but some projects don't follow that
        if isinstance(backend_path, str):
            backend_path = [backend_path]
        sys.path = backend_path + sys.path
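
    # PEP 517: build-backend names a Python object using the same module:object
    # syntax as a setuptools entry point, e.g. 'flit.api:main' means
    # "import flit.api; backend = flit.api.main"; the ':object' part is optional.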
    module_name, _, object_name = backend_name.partition(":")
    backend_module = importlib.import_module(module_name)

    if object_name:
        return getattr(backend_module, object_name)

    return backend_module


def generate_build_requirements(backend, requirements):
    get_requires = getattr(backend, 'get_requires_for_build_wheel', None)
    if get_requires:
        new_reqs = get_requires(config_settings=requirements.config_settings)
        requirements.extend(new_reqs, source='get_requires_for_build_wheel')
        requirements.check(source='get_requires_for_build_wheel')


def parse_metadata_file(metadata_file):
    return email.parser.Parser().parse(metadata_file, headersonly=True)
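

# 'Requires-Dist' is the field used by current core metadata;
# 'Requires' comes from older metadata versions, so both are collected.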
def requires_from_parsed_metadata_file(message):
    return {k: message.get_all(k, ()) for k in ('Requires', 'Requires-Dist')}


def package_name_from_parsed_metadata_file(message):
    return message.get('name')


def package_name_and_requires_from_metadata_file(metadata_file):
    message = parse_metadata_file(metadata_file)
    package_name = package_name_from_parsed_metadata_file(message)
    requires = requires_from_parsed_metadata_file(message)
    return package_name, requires


def generate_run_requirements_hook(backend, requirements):
    hook_name = 'prepare_metadata_for_build_wheel'
    prepare_metadata = getattr(backend, hook_name, None)
    if not prepare_metadata:
        raise ValueError(
            'The build backend cannot provide build metadata '
            '(incl. runtime requirements) before build. '
            'Use the provisional -w flag to build the wheel and parse the metadata from it, '
            'or use the -R flag not to generate runtime dependencies.'
        )
    dir_basename = prepare_metadata('.', config_settings=requirements.config_settings)
    with open(dir_basename + '/METADATA') as metadata_file:
        name, requires = package_name_and_requires_from_metadata_file(metadata_file)
        for key, req in requires.items():
            requirements.extend(req,
                                package_name=name,
                                source=f'hook generated metadata: {key} ({name})')


def find_built_wheel(wheeldir):
    wheels = glob.glob(os.path.join(wheeldir, '*.whl'))
    if not wheels:
        return None
    if len(wheels) > 1:
        raise RuntimeError('Found multiple wheels in %{_pyproject_wheeldir}, '
                           'this is not supported with %pyproject_buildrequires -w.')
    return wheels[0]


def generate_run_requirements_wheel(backend, requirements, wheeldir):
    # Reuse the wheel from the previous round of %pyproject_buildrequires (if it exists)
    wheel = find_built_wheel(wheeldir)
    if not wheel:
        import pyproject_wheel
        returncode = pyproject_wheel.build_wheel(
            wheeldir=wheeldir,
            stdout=sys.stderr,
            config_settings=requirements.config_settings,
        )
        if returncode != 0:
            raise RuntimeError('Failed to build the wheel for %pyproject_buildrequires -w.')
        wheel = find_built_wheel(wheeldir)
    if not wheel:
        raise RuntimeError('Cannot locate the built wheel for %pyproject_buildrequires -w.')

    print_err(f'Reading metadata from {wheel}')
    with zipfile.ZipFile(wheel) as wheelfile:
        for name in wheelfile.namelist():
            if name.count('/') == 1 and name.endswith('.dist-info/METADATA'):
                with io.TextIOWrapper(wheelfile.open(name), encoding='utf-8') as metadata_file:
                    name, requires = package_name_and_requires_from_metadata_file(metadata_file)
                    for key, req in requires.items():
                        requirements.extend(req,
                                            package_name=name,
                                            source=f'built wheel metadata: {key} ({name})')
                break
        else:
            raise RuntimeError('Could not find *.dist-info/METADATA in built wheel.')


def generate_run_requirements(backend, requirements, *, build_wheel, wheeldir):
    if build_wheel:
        generate_run_requirements_wheel(backend, requirements, wheeldir)
    else:
        generate_run_requirements_hook(backend, requirements)
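

# Test dependencies are gathered via the tox-current-env plugin: tox only dumps
# the dependencies and extras of the selected environments to temporary files
# (--print-deps-to / --print-extras-to) instead of running the test suite, and
# --no-provision makes tox write out (rather than install) anything it would
# need to provision itself first.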
def generate_tox_requirements(toxenv, requirements):
    toxenv = ','.join(toxenv)
    requirements.add('tox-current-env >= 0.0.6', source='tox itself')
    requirements.check(source='tox itself')
    with tempfile.NamedTemporaryFile('r') as deps, \
            tempfile.NamedTemporaryFile('r') as extras, \
            tempfile.NamedTemporaryFile('r') as provision:
        r = subprocess.run(
            [sys.executable, '-m', 'tox',
             '--print-deps-to', deps.name,
             '--print-extras-to', extras.name,
             '--no-provision', provision.name,
             '-q', '-r', '-e', toxenv],
            check=False,
            encoding='utf-8',
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        if r.stdout:
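            # Echo tox output so possible failures are visible in the build log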
            print_err(r.stdout, end='')

        provision_content = provision.read()
        if provision_content and r.returncode != 0:
            provision_requires = json.loads(provision_content)
            if provision_requires.get('minversion') is not None:
                requirements.add(f'tox >= {provision_requires["minversion"]}',
                                 source='tox provision (minversion)')
            if 'requires' in provision_requires:
                requirements.extend(provision_requires["requires"],
                                    source='tox provision (requires)')
            requirements.check(source='tox provision')  # this terminates the script
            raise RuntimeError(
                'Dependencies requested by tox provisioning appear installed, '
                'but tox disagreed.')
        else:
            r.check_returncode()

        deplines = deps.read().splitlines()
        packages = convert_requirements_txt(deplines)
        requirements.add_extras(*extras.read().splitlines())
        requirements.extend(packages,
                            source=f'tox --print-deps-only: {toxenv}')
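

# Build an RPM dependency string such as python3dist(foo) or
# python3dist(foo) >= 1.2; with a non-default python3_pkgversion
# this becomes e.g. python3.12dist(foo).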
def python3dist(name, op=None, version=None, python3_pkgversion="3"):
    prefix = f"python{python3_pkgversion}dist"

    if op is None:
        if version is not None:
            raise AssertionError('op and version go together')
        return f'{prefix}({name})'
    else:
        return f'{prefix}({name}) {op} {version}'


def generate_requires(
    *, include_runtime=False, build_wheel=False, wheeldir=None, toxenv=None, extras=None,
    get_installed_version=importlib.metadata.version,  # for dep injection
    generate_extras=False, python3_pkgversion="3", requirement_files=None, use_build_system=True,
    output, config_settings=None,
):
    """Generate the BuildRequires for the project in the current directory

    The generated BuildRequires are written to the provided output.

    This is the main Python entry point.
    """
    requirements = Requirements(
        get_installed_version, extras=extras or [],
        generate_extras=generate_extras,
        python3_pkgversion=python3_pkgversion,
        config_settings=config_settings,
    )

    try:
        if (include_runtime or toxenv) and not use_build_system:
            raise ValueError('-N option cannot be used in combination with -r, -e, -t, -x options')
        if requirement_files:
            for req_file in requirement_files:
                requirements.extend(
                    convert_requirements_txt(req_file, pathlib.Path(req_file.name)),
                    source=f'requirements file {req_file.name}'
                )
            requirements.check(source='all requirements files')
        if use_build_system:
            backend = get_backend(requirements)
            generate_build_requirements(backend, requirements)
        if toxenv:
            include_runtime = True
            generate_tox_requirements(toxenv, requirements)
        if include_runtime:
            generate_run_requirements(backend, requirements, build_wheel=build_wheel, wheeldir=wheeldir)
    except EndPass:
        return
    finally:
        output.write_text(os.linesep.join(requirements.output_lines) + os.linesep)


def main(argv):
    parser = argparse.ArgumentParser(
        description='Generate BuildRequires for a Python project.',
        prog='%pyproject_buildrequires',
        add_help=False,
    )
    parser.add_argument(
        '--help', action='help',
        default=argparse.SUPPRESS,
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        '-r', '--runtime', action='store_true', default=True,
        help=argparse.SUPPRESS,  # Generate run-time requirements (backwards-compatibility only)
    )
    parser.add_argument(
        '--generate-extras', action='store_true',
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        '-p', '--python3_pkgversion', metavar='PYTHON3_PKGVERSION',
        default="3", help=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--output', type=pathlib.Path, required=True, help=argparse.SUPPRESS,
    )
    parser.add_argument(
        '--wheeldir', metavar='PATH', default=None,
        help=argparse.SUPPRESS,
    )
    parser.add_argument(
        '-x', '--extras', metavar='EXTRAS', action='append',
        help='comma separated list of "extras" for runtime requirements '
             '(e.g. -x testing,feature-x) (implies --runtime, can be repeated)',
    )
    parser.add_argument(
        '-t', '--tox', action='store_true',
        help=('generate test requirements from tox environment '
              '(implies --runtime)'),
    )
    parser.add_argument(
        '-e', '--toxenv', metavar='TOXENVS', action='append',
        help=('specify tox environments (comma separated and/or repeated) '
              '(implies --tox)'),
    )
    parser.add_argument(
        '-w', '--wheel', action='store_true', default=False,
        help=('Generate run-time requirements by building the wheel '
              '(useful for build backends without the prepare_metadata_for_build_wheel hook)'),
    )
    parser.add_argument(
        '-R', '--no-runtime', action='store_false', dest='runtime',
        help="Don't generate run-time requirements (implied by -N)",
    )
    parser.add_argument(
        '-N', '--no-use-build-system', dest='use_build_system',
        action='store_false', help='Use -N to indicate that the project does not use any build system',
    )
    parser.add_argument(
        'requirement_files', nargs='*', type=argparse.FileType('r'),
        metavar='REQUIREMENTS.TXT',
        help='Add buildrequires from file',
    )
    parser.add_argument(
        '-C',
        dest='config_settings',
        action='append',
        help='Configuration settings to pass to the PEP 517 backend',
    )

    args = parser.parse_args(argv)

    if not args.use_build_system:
        args.runtime = False

    if args.wheel:
        if not args.wheeldir:
            raise ValueError('--wheeldir must be set when -w is used.')

    if args.toxenv:
        args.tox = True

    if args.tox:
        args.runtime = True
        if not args.toxenv:
            _default = f'py{sys.version_info.major}{sys.version_info.minor}'
            args.toxenv = [os.getenv('RPM_TOXENV', _default)]

    if args.extras:
        args.runtime = True

    try:
        generate_requires(
            include_runtime=args.runtime,
            build_wheel=args.wheel,
            wheeldir=args.wheeldir,
            toxenv=args.toxenv,
            extras=args.extras,
            generate_extras=args.generate_extras,
            python3_pkgversion=args.python3_pkgversion,
            requirement_files=args.requirement_files,
            use_build_system=args.use_build_system,
            output=args.output,
            config_settings=parse_config_settings_args(args.config_settings),
        )
    except Exception:
        # Log the traceback explicitly (it's useful debug info)
        traceback.print_exc()
        exit(1)


if __name__ == '__main__':
    main(sys.argv[1:])