Testing proposed upstream changes

https://github.com/rpm-software-management/rpm/pull/1195

Added a new test suite
Author: Tomas Orsava, 2020-04-29 17:52:55 +02:00
parent c8249102ec
commit 0a5307e928
12 changed files with 2020 additions and 249 deletions

.gitignore (new file, +3)

@ -0,0 +1,3 @@
/test-sources-2020-04-29.tar.gz
/tests/__pycache__/
/tests/data/scripts_pythondistdeps/usr/

python-rpm-generators.spec

@ -1,7 +1,7 @@
 Name: python-rpm-generators
 Summary: Dependency generators for Python RPMs
 Version: 11
-Release: 4%{?dist}
+Release: 5%{?dist}
 # Originally all those files were part of RPM, so license is kept here
 License: GPLv2+
@ -45,6 +45,11 @@ install -Dpm0755 -t %{buildroot}%{_rpmconfigdir} pythondistdeps.py
 %{_rpmconfigdir}/pythondistdeps.py
 
 %changelog
+* Wed Apr 29 2020 Tomas Orsava <torsava@redhat.com> - 11-5
+- Testing proposed upstream changes:
+  https://github.com/rpm-software-management/rpm/pull/1195
+- Added a new test suite
+
 * Tue Apr 28 2020 Miro Hrončok <mhroncok@redhat.com> - 11-4
 - Don't define global Lua variables from Python generator

pythondistdeps.py

@ -3,6 +3,7 @@
 #
 # Copyright 2010 Per Øyvind Karlsen <proyvind@moondrake.org>
 # Copyright 2015 Neal Gompa <ngompa13@gmail.com>
+# Copyright 2020 SUSE LLC
 #
 # This program is free software. It may be redistributed and/or modified under
 # the terms of the LGPL version 2.1 (or later).
@ -10,8 +11,12 @@
 # RPM python dependency generator, using .egg-info/.egg-link/.dist-info data
 #
+# Please know:
+# - Notes from an attempted rewrite from pkg_resources to importlib.metadata in
+#   2020 can be found in the message of the commit that added this line.
+
 from __future__ import print_function
-from getopt import getopt
+import argparse
 from os.path import basename, dirname, isdir, sep
 from sys import argv, stdin, version
 from distutils.sysconfig import get_python_lib
@ -50,27 +55,29 @@ class RpmVersion():
         if self.pre:
             rpm_suffix = '~{}'.format(''.join(str(x) for x in self.pre))
         elif self.dev:
-            rpm_suffix = '~{}'.format(''.join(str(x) for x in self.dev))
+            rpm_suffix = '~~{}'.format(''.join(str(x) for x in self.dev))
         elif self.post:
             rpm_suffix = '^post{}'.format(self.post[1])
         else:
             rpm_suffix = ''
         return '{}{}{}'.format(rpm_epoch, rpm_version, rpm_suffix)
 
 def convert_compatible(name, operator, version_id):
     if version_id.endswith('.*'):
         print('Invalid requirement: {} {} {}'.format(name, operator, version_id))
         exit(65) # os.EX_DATAERR
     version = RpmVersion(version_id)
     if len(version.version) == 1:
         print('Invalid requirement: {} {} {}'.format(name, operator, version_id))
         exit(65) # os.EX_DATAERR
     upper_version = RpmVersion(version_id)
     upper_version.version.pop()
     upper_version.increment()
     return '({} >= {} with {} < {})'.format(
         name, version, name, upper_version)
 
 def convert_equal(name, operator, version_id):
     if version_id.endswith('.*'):
         version_id = version_id[:-2] + '.0'
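
The only functional change in this hunk is the dev-release suffix: '~~' instead of '~' makes a .dev snapshot sort below any pre-release of the same version in RPM ordering. A quick way to sanity-check that ordering, as a sketch outside this commit that assumes the rpm Python bindings (python3-rpm) are installed:

    import rpm

    # -1 means the left EVR sorts lower; a dev snapshot should sort below a beta,
    # and a beta below the final release
    print(rpm.labelCompare(('0', '2.0.0~~dev1', '1'), ('0', '2.0.0~b5', '1')))  # -1
    print(rpm.labelCompare(('0', '2.0.0~b5', '1'), ('0', '2.0.0', '1')))        # -1
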
@ -78,13 +85,15 @@ def convert_equal(name, operator, version_id):
     version = RpmVersion(version_id)
     return '{} = {}'.format(name, version)
 
 def convert_arbitrary_equal(name, operator, version_id):
     if version_id.endswith('.*'):
         print('Invalid requirement: {} {} {}'.format(name, operator, version_id))
         exit(65) # os.EX_DATAERR
     version = RpmVersion(version_id)
     return '{} = {}'.format(name, version)
 
 def convert_not_equal(name, operator, version_id):
     if version_id.endswith('.*'):
         version_id = version_id[:-2]
@ -96,6 +105,7 @@ def convert_not_equal(name, operator, version_id):
     return '({} < {} or {} > {})'.format(
         name, version, name, lower_version)
 
 def convert_ordered(name, operator, version_id):
     if version_id.endswith('.*'):
         # PEP 440 does not define semantics for prefix matching
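
For orientation, the convert_* helpers above translate PEP 440 specifiers into RPM (rich) dependency strings. A small sketch of the expected mapping, assuming the rewritten script below is saved as pythondistdeps.py on the import path and imported as a module (which the new __main__ guard allows):

    from pythondistdeps import convert

    print(convert('foobar0', '~=', '2.4.8'))   # (foobar0 >= 2.4.8 with foobar0 < 2.5)
    print(convert('foobar33', '!=', '2.4.8'))  # (foobar33 < 2.4.8 or foobar33 > 2.4.8)
    print(convert('hugo4', '>', '11rc0'))      # hugo4 > 11~rc0
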
@ -112,275 +122,304 @@ def convert_ordered(name, operator, version_id):
     version = RpmVersion(version_id)
     return '{} {} {}'.format(name, operator, version)
 
 OPERATORS = {'~=': convert_compatible,
              '==': convert_equal,
              '===': convert_arbitrary_equal,
              '!=': convert_not_equal,
              '<=': convert_ordered,
              '<': convert_ordered,
              '>=': convert_ordered,
              '>': convert_ordered}
 
 def convert(name, operator, version_id):
-    return OPERATORS[operator](name, operator, version_id)
+    try:
+        return OPERATORS[operator](name, operator, version_id)
+    except Exception as exc:
+        raise RuntimeError("Cannot process Python package version `{}` for name `{}`".
+                           format(version_id, name)) from exc
 
-opts, args = getopt(
-    argv[1:], 'hPRrCEMmLl:',
-    ['help', 'provides', 'requires', 'recommends', 'conflicts', 'extras', 'majorver-provides', 'majorver-only', 'legacy-provides' , 'legacy'])
-
-Provides = False
-Requires = False
-Recommends = False
-Conflicts = False
-Extras = False
-Provides_PyMajorVer_Variant = False
-PyMajorVer_Deps = False
-legacy_Provides = False
-legacy = False
 
 def normalize_name(name):
     """https://www.python.org/dev/peps/pep-0503/#normalized-names"""
     import re
     return re.sub(r'[-_.]+', '-', name).lower()
 
-for o, a in opts:
-    if o in ('-h', '--help'):
-        print('-h, --help\tPrint help')
-        print('-P, --provides\tPrint Provides')
-        print('-R, --requires\tPrint Requires')
-        print('-r, --recommends\tPrint Recommends')
-        print('-C, --conflicts\tPrint Conflicts')
-        print('-E, --extras\tPrint Extras ')
-        print('-M, --majorver-provides\tPrint extra Provides with Python major version only')
-        print('-m, --majorver-only\tPrint Provides/Requires with Python major version only')
-        print('-L, --legacy-provides\tPrint extra legacy pythonegg Provides')
-        print('-l, --legacy\tPrint legacy pythonegg Provides/Requires instead')
-        exit(1)
-    elif o in ('-P', '--provides'):
-        Provides = True
-    elif o in ('-R', '--requires'):
-        Requires = True
-    elif o in ('-r', '--recommends'):
-        Recommends = True
-    elif o in ('-C', '--conflicts'):
-        Conflicts = True
-    elif o in ('-E', '--extras'):
-        Extras = True
-    elif o in ('-M', '--majorver-provides'):
-        Provides_PyMajorVer_Variant = True
-    elif o in ('-m', '--majorver-only'):
-        PyMajorVer_Deps = True
-    elif o in ('-L', '--legacy-provides'):
-        legacy_Provides = True
-    elif o in ('-l', '--legacy'):
-        legacy = True
-
-if Requires:
-    py_abi = True
-else:
-    py_abi = False
-py_deps = {}
-if args:
-    files = args
-else:
-    files = stdin.readlines()
-
-for f in files:
-    f = f.strip()
-    lower = f.lower()
-    name = 'python(abi)'
-    # add dependency based on path, versioned if within versioned python directory
-    if py_abi and (lower.endswith('.py') or lower.endswith('.pyc') or lower.endswith('.pyo')):
-        if name not in py_deps:
-            py_deps[name] = []
-        purelib = get_python_lib(standard_lib=0, plat_specific=0).split(version[:3])[0]
-        platlib = get_python_lib(standard_lib=0, plat_specific=1).split(version[:3])[0]
-        for lib in (purelib, platlib):
-            if lib in f:
-                spec = ('==', f.split(lib)[1].split(sep)[0])
-                if spec not in py_deps[name]:
-                    py_deps[name].append(spec)
-    # XXX: hack to workaround RPM internal dependency generator not passing directories
-    lower_dir = dirname(lower)
-    if lower_dir.endswith('.egg') or \
-            lower_dir.endswith('.egg-info') or \
-            lower_dir.endswith('.dist-info'):
-        lower = lower_dir
-        f = dirname(f)
-    # Determine provide, requires, conflicts & recommends based on egg/dist metadata
-    if lower.endswith('.egg') or \
-            lower.endswith('.egg-info') or \
-            lower.endswith('.dist-info'):
-        # This import is very slow, so only do it if needed
-        from pkg_resources import Distribution, FileMetadata, PathMetadata, Requirement, parse_version
-        dist_name = basename(f)
-        if isdir(f):
-            path_item = dirname(f)
-            metadata = PathMetadata(path_item, f)
-        else:
-            path_item = f
-            metadata = FileMetadata(f)
-        dist = Distribution.from_location(path_item, dist_name, metadata)
-        # Check if py_version is defined in the metadata file/directory name
-        if not dist.py_version:
-            # Try to parse the Python version from the path the metadata
-            # resides at (e.g. /usr/lib/pythonX.Y/site-packages/...)
-            import re
-            res = re.search(r"/python(?P<pyver>\d+\.\d+)/", path_item)
-            if res:
-                dist.py_version = res.group('pyver')
-            else:
-                warn("Version for {!r} has not been found".format(dist), RuntimeWarning)
-                continue
-        # XXX: https://github.com/pypa/setuptools/pull/1275
-        import platform
-        platform.python_version = lambda: dist.py_version
-        # This is the PEP 503 normalized name.
-        # It does also convert dots to dashes, unlike dist.key.
-        # In the current code, we only add additional provides with this.
-        # Later, we can start requiring them.
-        # See https://bugzilla.redhat.com/show_bug.cgi?id=1791530
-        normalized_name = normalize_name(dist.project_name)
-        if Provides_PyMajorVer_Variant or PyMajorVer_Deps or legacy_Provides or legacy:
-            # Get the Python major version
-            pyver_major = dist.py_version.split('.')[0]
-        if Provides:
-            # If egg/dist metadata says package name is python, we provide python(abi)
-            if dist.key == 'python':
-                name = 'python(abi)'
-                if name not in py_deps:
-                    py_deps[name] = []
-                py_deps[name].append(('==', dist.py_version))
-            if not legacy or not PyMajorVer_Deps:
-                name = 'python{}dist({})'.format(dist.py_version, dist.key)
-                if name not in py_deps:
-                    py_deps[name] = []
-                name_ = 'python{}dist({})'.format(dist.py_version, normalized_name)
-                if name_ not in py_deps:
-                    py_deps[name_] = []
-            if Provides_PyMajorVer_Variant or PyMajorVer_Deps:
-                pymajor_name = 'python{}dist({})'.format(pyver_major, dist.key)
-                if pymajor_name not in py_deps:
-                    py_deps[pymajor_name] = []
-                pymajor_name_ = 'python{}dist({})'.format(pyver_major, normalized_name)
-                if pymajor_name_ not in py_deps:
-                    py_deps[pymajor_name_] = []
-            if legacy or legacy_Provides:
-                legacy_name = 'pythonegg({})({})'.format(pyver_major, dist.key)
-                if legacy_name not in py_deps:
-                    py_deps[legacy_name] = []
-            if dist.version:
-                version = dist.version
-                spec = ('==', version)
-                if spec not in py_deps[name]:
-                    if not legacy:
-                        py_deps[name].append(spec)
-                        if name != name_:
-                            py_deps[name_].append(spec)
-                        if Provides_PyMajorVer_Variant:
-                            py_deps[pymajor_name].append(spec)
-                            if pymajor_name != pymajor_name_:
-                                py_deps[pymajor_name_].append(spec)
-                    if legacy or legacy_Provides:
-                        py_deps[legacy_name].append(spec)
-        if Requires or (Recommends and dist.extras):
-            name = 'python(abi)'
-            # If egg/dist metadata says package name is python, we don't add dependency on python(abi)
-            if dist.key == 'python':
-                py_abi = False
-                if name in py_deps:
-                    py_deps.pop(name)
-            elif py_abi and dist.py_version:
-                if name not in py_deps:
-                    py_deps[name] = []
-                spec = ('==', dist.py_version)
-                if spec not in py_deps[name]:
-                    py_deps[name].append(spec)
-            deps = dist.requires()
-            if Recommends:
-                depsextras = dist.requires(extras=dist.extras)
-                if not Requires:
-                    for dep in reversed(depsextras):
-                        if dep in deps:
-                            depsextras.remove(dep)
-                deps = depsextras
-            # console_scripts/gui_scripts entry points need pkg_resources from setuptools
-            if ((dist.get_entry_map('console_scripts') or
-                 dist.get_entry_map('gui_scripts')) and
-                (lower.endswith('.egg') or
-                 lower.endswith('.egg-info'))):
-                # stick them first so any more specific requirement overrides it
-                deps.insert(0, Requirement.parse('setuptools'))
-            # add requires/recommends based on egg/dist metadata
-            for dep in deps:
-                if legacy:
-                    name = 'pythonegg({})({})'.format(pyver_major, dep.key)
-                else:
-                    if PyMajorVer_Deps:
-                        name = 'python{}dist({})'.format(pyver_major, dep.key)
-                    else:
-                        name = 'python{}dist({})'.format(dist.py_version, dep.key)
-                for spec in dep.specs:
-                    if name not in py_deps:
-                        py_deps[name] = []
-                    if spec not in py_deps[name]:
-                        py_deps[name].append(spec)
-                if not dep.specs:
-                    py_deps[name] = []
-        # Unused, for automatic sub-package generation based on 'extras' from egg/dist metadata
-        # TODO: implement in rpm later, or...?
-        if Extras:
-            deps = dist.requires()
-            extras = dist.extras
-            print(extras)
-            for extra in extras:
-                print('%%package\textras-{}'.format(extra))
-                print('Summary:\t{} extra for {} python package'.format(extra, dist.key))
-                print('Group:\t\tDevelopment/Python')
-                depsextras = dist.requires(extras=[extra])
-                for dep in reversed(depsextras):
-                    if dep in deps:
-                        depsextras.remove(dep)
-                deps = depsextras
-                for dep in deps:
-                    for spec in dep.specs:
-                        if spec[0] == '!=':
-                            print('Conflicts:\t{} {} {}'.format(dep.key, '==', spec[1]))
-                        else:
-                            print('Requires:\t{} {} {}'.format(dep.key, spec[0], spec[1]))
-                print('%%description\t{}'.format(extra))
-                print('{} extra for {} python package'.format(extra, dist.key))
-                print('%%files\t\textras-{}\n'.format(extra))
-        if Conflicts:
-            # Should we really add conflicts for extras?
-            # Creating a meta package per extra with recommends on, which has
-            # the requires/conflicts in stead might be a better solution...
-            for dep in dist.requires(extras=dist.extras):
-                name = dep.key
-                for spec in dep.specs:
-                    if spec[0] == '!=':
-                        if name not in py_deps:
-                            py_deps[name] = []
-                        spec = ('==', spec[1])
-                        if spec not in py_deps[name]:
-                            py_deps[name].append(spec)
-
-names = list(py_deps.keys())
-names.sort()
-for name in names:
-    if py_deps[name]:
-        # Print out versioned provides, requires, recommends, conflicts
-        spec_list = []
-        for spec in py_deps[name]:
-            spec_list.append(convert(name, spec[0], spec[1]))
-        if len(spec_list) == 1:
-            print(spec_list[0])
-        else:
-            print('({})'.format(' with '.join(spec_list)))
-    else:
-        # Print out unversioned provides, requires, recommends, conflicts
-        print(name)
+if __name__ == "__main__":
+    """To allow this script to be importable (and its classes/functions
+       reused), actions are performed only when run as a main script."""
+
+    parser = argparse.ArgumentParser(prog=argv[0])
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument('-P', '--provides', action='store_true', help='Print Provides')
+    group.add_argument('-R', '--requires', action='store_true', help='Print Requires')
+    group.add_argument('-r', '--recommends', action='store_true', help='Print Recommends')
+    group.add_argument('-C', '--conflicts', action='store_true', help='Print Conflicts')
+    group.add_argument('-E', '--extras', action='store_true', help='Print Extras')
+    group_majorver = parser.add_mutually_exclusive_group()
+    group_majorver.add_argument('-M', '--majorver-provides', action='store_true', help='Print extra Provides with Python major version only')
+    group_majorver.add_argument('--majorver-provides-versions', action='append',
+                                help='Print extra Provides with Python major version only for listed '
+                                     'Python VERSIONS (appended or comma separated without spaces, e.g. 2.7,3.9)')
+    parser.add_argument('-m', '--majorver-only', action='store_true', help='Print Provides/Requires with Python major version only')
+    parser.add_argument('-n', '--normalized-names-format', action='store',
+                        default="legacy-dots", choices=["pep503", "legacy-dots"],
+                        help='Format of normalized names according to pep503 or legacy format that allows dots [default]')
+    parser.add_argument('--normalized-names-provide-both', action='store_true',
+                        help='Provide both `pep503` and `legacy-dots` format of normalized names (useful for a transition period)')
+    parser.add_argument('-L', '--legacy-provides', action='store_true', help='Print extra legacy pythonegg Provides')
+    parser.add_argument('-l', '--legacy', action='store_true', help='Print legacy pythonegg Provides/Requires instead')
+    parser.add_argument('files', nargs=argparse.REMAINDER)
+    args = parser.parse_args()
+
+    py_abi = args.requires
+    py_deps = {}
+
+    if args.majorver_provides_versions:
+        # Go through the arguments (can be specified multiple times),
+        # and parse individual versions (can be comma-separated)
+        args.majorver_provides_versions = [v for vstring in args.majorver_provides_versions
+                                             for v in vstring.split(",")]
+
+    # If normalized_names_require_pep503 is True we require the pep503
+    # normalized name, if it is False we provide the legacy normalized name
+    normalized_names_require_pep503 = args.normalized_names_format == "pep503"
+
+    # If normalized_names_provide_pep503/legacy is True we provide the
+    # pep503/legacy normalized name, if it is False we don't
+    normalized_names_provide_pep503 = \
+        args.normalized_names_format == "pep503" or args.normalized_names_provide_both
+    normalized_names_provide_legacy = \
+        args.normalized_names_format == "legacy-dots" or args.normalized_names_provide_both
+
+    # At least one type of normalization must be provided
+    assert normalized_names_provide_pep503 or normalized_names_provide_legacy
+
+    for f in (args.files or stdin.readlines()):
+        f = f.strip()
+        lower = f.lower()
+        name = 'python(abi)'
+        # add dependency based on path, versioned if within versioned python directory
+        if py_abi and (lower.endswith('.py') or lower.endswith('.pyc') or lower.endswith('.pyo')):
+            if name not in py_deps:
+                py_deps[name] = []
+            purelib = get_python_lib(standard_lib=0, plat_specific=0).split(version[:3])[0]
+            platlib = get_python_lib(standard_lib=0, plat_specific=1).split(version[:3])[0]
+            for lib in (purelib, platlib):
+                if lib in f:
+                    spec = ('==', f.split(lib)[1].split(sep)[0])
+                    if spec not in py_deps[name]:
+                        py_deps[name].append(spec)
+        # XXX: hack to workaround RPM internal dependency generator not passing directories
+        lower_dir = dirname(lower)
+        if lower_dir.endswith('.egg') or \
+                lower_dir.endswith('.egg-info') or \
+                lower_dir.endswith('.dist-info'):
+            lower = lower_dir
+            f = dirname(f)
+        # Determine provide, requires, conflicts & recommends based on egg/dist metadata
+        if lower.endswith('.egg') or \
+                lower.endswith('.egg-info') or \
+                lower.endswith('.dist-info'):
+            # This import is very slow, so only do it if needed
+            # - Notes from an attempted rewrite from pkg_resources to
+            #   importlib.metadata in 2020 can be found in the message of
+            #   the commit that added this line.
+            from pkg_resources import Distribution, FileMetadata, PathMetadata, Requirement, parse_version
+            dist_name = basename(f)
+            if isdir(f):
+                path_item = dirname(f)
+                metadata = PathMetadata(path_item, f)
+            else:
+                path_item = f
+                metadata = FileMetadata(f)
+            dist = Distribution.from_location(path_item, dist_name, metadata)
+            # Check if py_version is defined in the metadata file/directory name
+            if not dist.py_version:
+                # Try to parse the Python version from the path the metadata
+                # resides at (e.g. /usr/lib/pythonX.Y/site-packages/...)
+                import re
+                res = re.search(r"/python(?P<pyver>\d+\.\d+)/", path_item)
+                if res:
+                    dist.py_version = res.group('pyver')
+                else:
+                    warn("Version for {!r} has not been found".format(dist), RuntimeWarning)
+                    continue
+            # pkg_resources use platform.python_version to evaluate if a
+            # dependency is relevant based on environment markers [1],
+            # e.g. requirement `argparse;python_version<"2.7"`
+            #
+            # Since we're running this script on one Python version while
+            # possibly evaluating packages for different versions, we mock the
+            # platform.python_version function. Discussed upstream [2].
+            #
+            # [1] https://www.python.org/dev/peps/pep-0508/#environment-markers
+            # [2] https://github.com/pypa/setuptools/pull/1275
+            import platform
+            platform.python_version = lambda: dist.py_version
+            # This is the PEP 503 normalized name.
+            # It does also convert dots to dashes, unlike dist.key.
+            # See https://bugzilla.redhat.com/show_bug.cgi?id=1791530
+            normalized_name = normalize_name(dist.project_name)
+            if args.majorver_provides or args.majorver_provides_versions or \
+                    args.majorver_only or args.legacy_provides or args.legacy:
+                # Get the Python major version
+                pyver_major = dist.py_version.split('.')[0]
+            if args.provides:
+                # If egg/dist metadata says package name is python, we provide python(abi)
+                if dist.key == 'python':
+                    name = 'python(abi)'
+                    if name not in py_deps:
+                        py_deps[name] = []
+                    py_deps[name].append(('==', dist.py_version))
+                if not args.legacy or not args.majorver_only:
+                    if normalized_names_provide_legacy:
+                        name = 'python{}dist({})'.format(dist.py_version, dist.key)
+                        if name not in py_deps:
+                            py_deps[name] = []
+                    if normalized_names_provide_pep503:
+                        name_ = 'python{}dist({})'.format(dist.py_version, normalized_name)
+                        if name_ not in py_deps:
+                            py_deps[name_] = []
+                if args.majorver_provides or args.majorver_only or \
+                        (args.majorver_provides_versions and dist.py_version in args.majorver_provides_versions):
+                    if normalized_names_provide_legacy:
+                        pymajor_name = 'python{}dist({})'.format(pyver_major, dist.key)
+                        if pymajor_name not in py_deps:
+                            py_deps[pymajor_name] = []
+                    if normalized_names_provide_pep503:
+                        pymajor_name_ = 'python{}dist({})'.format(pyver_major, normalized_name)
+                        if pymajor_name_ not in py_deps:
+                            py_deps[pymajor_name_] = []
+                if args.legacy or args.legacy_provides:
+                    legacy_name = 'pythonegg({})({})'.format(pyver_major, dist.key)
+                    if legacy_name not in py_deps:
+                        py_deps[legacy_name] = []
+                if dist.version:
+                    version = dist.version
+                    spec = ('==', version)
+                    if normalized_names_provide_legacy:
+                        if spec not in py_deps[name]:
+                            py_deps[name].append(spec)
+                        if args.majorver_provides or \
+                                (args.majorver_provides_versions and dist.py_version in args.majorver_provides_versions):
+                            py_deps[pymajor_name].append(spec)
+                    if normalized_names_provide_pep503:
+                        if spec not in py_deps[name_]:
+                            py_deps[name_].append(spec)
+                        if args.majorver_provides or \
+                                (args.majorver_provides_versions and dist.py_version in args.majorver_provides_versions):
+                            py_deps[pymajor_name_].append(spec)
+                    if args.legacy or args.legacy_provides:
+                        if spec not in py_deps[legacy_name]:
+                            py_deps[legacy_name].append(spec)
+            if args.requires or (args.recommends and dist.extras):
+                name = 'python(abi)'
+                # If egg/dist metadata says package name is python, we don't add dependency on python(abi)
+                if dist.key == 'python':
+                    py_abi = False
+                    if name in py_deps:
+                        py_deps.pop(name)
+                elif py_abi and dist.py_version:
+                    if name not in py_deps:
+                        py_deps[name] = []
+                    spec = ('==', dist.py_version)
+                    if spec not in py_deps[name]:
+                        py_deps[name].append(spec)
+                deps = dist.requires()
+                if args.recommends:
+                    depsextras = dist.requires(extras=dist.extras)
+                    if not args.requires:
+                        for dep in reversed(depsextras):
+                            if dep in deps:
+                                depsextras.remove(dep)
+                    deps = depsextras
+                # console_scripts/gui_scripts entry points need pkg_resources from setuptools
+                if ((dist.get_entry_map('console_scripts') or
+                     dist.get_entry_map('gui_scripts')) and
+                    (lower.endswith('.egg') or
+                     lower.endswith('.egg-info'))):
+                    # stick them first so any more specific requirement overrides it
+                    deps.insert(0, Requirement.parse('setuptools'))
+                # add requires/recommends based on egg/dist metadata
+                for dep in deps:
+                    if normalized_names_require_pep503:
+                        dep_normalized_name = normalize_name(dep.project_name)
+                    else:
+                        dep_normalized_name = dep.key
+                    if args.legacy:
+                        name = 'pythonegg({})({})'.format(pyver_major, dep.key)
+                    else:
+                        if args.majorver_only:
+                            name = 'python{}dist({})'.format(pyver_major, dep_normalized_name)
+                        else:
+                            name = 'python{}dist({})'.format(dist.py_version, dep_normalized_name)
+                    for spec in dep.specs:
+                        if name not in py_deps:
+                            py_deps[name] = []
+                        if spec not in py_deps[name]:
+                            py_deps[name].append(spec)
+                    if not dep.specs:
+                        py_deps[name] = []
+            # Unused, for automatic sub-package generation based on 'extras' from egg/dist metadata
+            # TODO: implement in rpm later, or...?
+            if args.extras:
+                deps = dist.requires()
+                extras = dist.extras
+                print(extras)
+                for extra in extras:
+                    print('%%package\textras-{}'.format(extra))
+                    print('Summary:\t{} extra for {} python package'.format(extra, dist.key))
+                    print('Group:\t\tDevelopment/Python')
+                    depsextras = dist.requires(extras=[extra])
+                    for dep in reversed(depsextras):
+                        if dep in deps:
+                            depsextras.remove(dep)
+                    deps = depsextras
+                    for dep in deps:
+                        for spec in dep.specs:
+                            if spec[0] == '!=':
+                                print('Conflicts:\t{} {} {}'.format(dep.key, '==', spec[1]))
+                            else:
+                                print('Requires:\t{} {} {}'.format(dep.key, spec[0], spec[1]))
+                    print('%%description\t{}'.format(extra))
+                    print('{} extra for {} python package'.format(extra, dist.key))
+                    print('%%files\t\textras-{}\n'.format(extra))
+            if args.conflicts:
+                # Should we really add conflicts for extras?
+                # Creating a meta package per extra with recommends on, which has
+                # the requires/conflicts in stead might be a better solution...
+                for dep in dist.requires(extras=dist.extras):
+                    name = dep.key
+                    for spec in dep.specs:
+                        if spec[0] == '!=':
+                            if name not in py_deps:
+                                py_deps[name] = []
+                            spec = ('==', spec[1])
+                            if spec not in py_deps[name]:
+                                py_deps[name].append(spec)
+
+    names = list(py_deps.keys())
+    names.sort()
+    for name in names:
+        if py_deps[name]:
+            # Print out versioned provides, requires, recommends, conflicts
+            spec_list = []
+            for spec in py_deps[name]:
+                spec_list.append(convert(name, spec[0], spec[1]))
+            if len(spec_list) == 1:
+                print(spec_list[0])
+            else:
+                # Sort spec_list so that the results can be tested easily
+                print('({})'.format(' with '.join(sorted(spec_list))))
+        else:
+            # Print out unversioned provides, requires, recommends, conflicts
+            print(name)
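
The generator reads file names from the command line or stdin and prints one RPM dependency per line; the test suite below drives it exactly this way through subprocess. A minimal sketch of such an invocation (the dist-info path is illustrative):

    import subprocess
    import sys

    # hypothetical metadata path; any .dist-info/.egg-info directory works
    info_dir = "usr/lib/python3.9/site-packages/six-1.12.0.dist-info"

    provides = subprocess.check_output(
        (sys.executable, "pythondistdeps.py", "--provides", "--majorver-provides"),
        input=info_dir, encoding="utf-8")
    print(provides)  # e.g. "python3.9dist(six) = 1.12.0" and "python3dist(six) = 1.12.0"
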

sources (new file, +1)

@ -0,0 +1 @@
SHA512 (test-sources-2020-04-29.tar.gz) = a5539fbe05a4f7128b4f82e960c3f1392a55ad53086dfd7fbc436d2743feaf64784e08667237baed3a32f149db25bc63e4ab3efc2b0270f969c59550b75102b1

METADATA of the mock pyreq2rpm.tests dist-info (new file, +21)

@ -0,0 +1,21 @@
Metadata-Version: 2.1
Name: pyreq2rpm.tests
Version: 2020.04.07.024dab0
Summary: Test package to verify conversion of dependencies from pip/python to rpm format, data taken from pyreq2rpm
Author: Tomas Orsava (author of this metapackage)
Home-page: https://github.com/gordonmessmer/pyreq2rpm
License: MIT
Description: This dist-info is mock metadata for a fictional package pyreq2rpm.tests
The important part of its contents is the requires.txt that contains
different formats of Python requirements taken from
https://github.com/gordonmessmer/pyreq2rpm, that are numbered as to be
unique. The metadata is then processed through
scripts/pythondistdeps.py and the resulting RPM requires compared to
expected results.
The version of the package contains the date when I converted the test
data from upstream to this metapackage, as well as the short hash of
the last git commit.
From the requirements I have omitted those that are incorrect, as they
crash the pythondistdeps.py script.

requires.txt of the mock pyreq2rpm.tests dist-info (new file, +102)

@ -0,0 +1,102 @@
# Taken from pyreq2rpm, removed tests that are expected to fail
foobar0~=2.4.8
foobar1~=2.4.8.0
foobar2~=2.4.8.1
foobar4~=2.0
foobar7~=2.4.8b5
foobar8~=2.0.0b5
foobar9~=2.4.8.post1
foobar10~=2.0.post1
foobar11==2.4.8
foobar12==2.4.8.0
foobar13==2.4.8.1
foobar14==2.4.8.*
foobar15==2.0
foobar16==2
foobar17==2.*
foobar18==2.4.8b5
foobar19==2.0.0b5
foobar20==2.4.8.post1
foobar21==2.0.post1
foobar22===2.4.8
foobar23===2.4.8.0
foobar24===2.4.8.1
foobar26===2.0
foobar27===2
foobar29===2.4.8b5
foobar30===2.0.0b5
foobar31===2.4.8.post1
foobar32===2.0.post1
foobar33!=2.4.8
foobar34!=2.4.8.0
foobar35!=2.4.8.1
foobar36!=2.4.8.*
foobar37!=2.0
foobar38!=2
foobar39!=2.*
foobar40!=2.4.8b5
foobar41!=2.0.0b5
foobar42!=2.4.8.post1
foobar43!=2.0.post1
foobar44<=2.4.8
foobar45<=2.4.8.0
foobar46<=2.4.8.1
foobar47<=2.4.8.*
foobar48<=2.0
foobar49<=2
foobar50<=2.*
foobar51<=2.4.8b5
foobar52<=2.0.0b5
foobar53<=2.4.8.post1
foobar54<=2.0.post1
foobar55<2.4.8
foobar56<2.4.8.0
foobar57<2.4.8.1
foobar58<2.4.8.*
foobar59<2.0
foobar60<2
foobar61<2.*
foobar62<2.4.8b5
foobar63<2.0.0b5
foobar64<2.4.8.post1
foobar65<2.0.post1
foobar66>=2.4.8
foobar67>=2.4.8.0
foobar68>=2.4.8.1
foobar69>=2.4.8.*
foobar70>=2.0
foobar71>=2
foobar72>=2.*
foobar73>=2.4.8b5
foobar74>=2.0.0b5
foobar75>=2.4.8.post1
foobar76>=2.0.post1
foobar77>2.4.8
foobar78>2.4.8.0
foobar79>2.4.8.1
foobar80>2.4.8.*
foobar81>2.0
foobar82>2
foobar83>2.*
foobar84>2.4.8b5
foobar85>2.0.0b5
foobar86>2.4.8.post1
foobar87>2.0.post1
pyparsing0
pyparsing1>=2.0.1,!=2.0.4,!=2.1.2,!=2.1.6
babel>=1.3,!=2.0
# Tests for breakages in Fedora
fedora-python-nb2plots==0+unknown
# Other tests
hugo1==1.0.0.dev7
hugo2<=8a4
hugo3!=11.1.1b14
hugo4>11rc0
hugo5===11.1.0.post3
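
Each of the lines above is parsed with pkg_resources and every individual specifier is run through the converter; a sketch of that path for one entry, again assuming the rewritten pythondistdeps.py is importable:

    from pkg_resources import Requirement

    from pythondistdeps import convert

    req = Requirement.parse("pyparsing1>=2.0.1,!=2.0.4,!=2.1.2,!=2.1.6")
    for operator, version in req.specs:
        # yields e.g. "python3.9dist(pyparsing1) >= 2.0.1" plus three "!=" exclusions
        print(convert("python3.9dist({})".format(req.key), operator, version))
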

File diff suppressed because it is too large.
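
The suppressed file is the pre-generated expected-results file (test-data.yaml) consumed by the test suite below; generate_test_cases() reads it as a three-level mapping of requires parameters, provides parameters, and a dist-info/egg-info path to the expected output. A hypothetical entry, shown as the Python structure yaml.safe_load() would return:

    # hypothetical illustration of the structure; keys and values are made up
    test_data = {
        "--requires": {
            "--provides --majorver-provides": {
                "usr/lib/python3.9/site-packages/six-1.12.0.dist-info": {
                    "provides": "python3.9dist(six) = 1.12.0\npython3dist(six) = 1.12.0",
                    "requires": "python(abi) = 3.9",
                },
            },
        },
    }
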

tests/data/scripts_pythondistdeps/test-requires.yaml (new file, +97)

@ -0,0 +1,97 @@
setuptools:
wheel:
'41.6.0': ['2.7', '3.7', '3.9']
sdist:
'41.6.0': ['2.7', '3.7', '3.9', '3.10']
pip:
wheel:
'19.1.1': ['2.7', '3.7']
'20.0.2': ['3.9']
sdist:
'20.0.2': ['3.11']
packaging:
wheel:
'19.0': ['2.7', '3.7']
'20.1': ['3.9']
attrs:
sdist:
'19.1.0': ['2.7', '3.9']
pyparsing:
wheel:
'2.4.0': ['2.7', '3.7', '3.9']
six:
wheel:
'1.12.0': ['2.7', '3.7', '3.9']
tox:
wheel:
'3.14.0': ['2.7', '3.7', '3.9']
urllib3:
sdist:
'1.25.7': ['2.7', '3.9']
zope.component:
sdist:
'4.3.0': ['2.7', '3.9']
zope.event:
wheel:
'4.2.0': ['2.7', '3.9']
zope.schema:
sdist:
'4.4.2': ['2.7', '3.9']
zope.interface:
sdist:
'5.1.0': ['3.9']
wheel:
'4.6.0': ['2.7']
lxml:
lib: lib64
wheel:
'4.4.0': ['2.7', '3.7']
scipy:
lib: lib64
wheel:
'1.2.1': ['2.7', '3.7']
numpy:
lib: lib64
wheel:
'1.16.4': ['2.7']
'1.17.4': ['3.7']
numpy-stl:
lib: lib64
sdist:
'2.11.2': ['2.7', '3.7', '3.9']
PyQt5_sip:
lib: lib64
wheel:
'4.19.19': ['3.7']
PyQtWebEngine:
lib: lib64
wheel:
'5.12.1': ['3.7', '3.9']
MarkupSafe:
lib: lib64
wheel:
'1.1.1': ['2.7', '3.7']
simplejson:
lib: lib64
sdist:
'3.16.0': ['2.7', '3.7', '3.9']
backports.range:
lib: lib64
sdist:
'3.7.2': ['2.7', '3.7', '3.9']
mistune:
sdist:
'0.8.4': ['2.7', '3.9']
astroid:
wheel:
'2.3.3': ['3.7', '3.9']
kubernetes:
wheel:
'11.0.0b2': ['2.7']
'11.0.0': ['3.9']
fsleyes:
wheel:
'0.32.3': ['3.9']
taskotron-python-versions:
wheel:
'0.1.dev6': ['3.9']
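
The layout above is package name, an optional lib: lib64 override, the distribution type (wheel or sdist), the package version, and the Python versions to install it for; check_and_install_test_data() below expands it into metadata directories under usr/. A condensed sketch of that expansion:

    import yaml
    from pathlib import Path

    # assumes test-requires.yaml is in the current directory
    with open("test-requires.yaml") as f:
        test_requires = yaml.safe_load(f)

    for package, variants in test_requires.items():
        lib = variants.pop("lib", "lib")              # lib64 for arch-specific packages
        for dist_type, versions in variants.items():  # "wheel" or "sdist"
            for pkg_version, py_versions in versions.items():
                for py_version in py_versions:
                    suffix = ".egg-info" if dist_type == "sdist" else ".dist-info"
                    pre_suffix = "-py" + py_version if dist_type == "sdist" else ""
                    name = package.replace("-", "_")
                    print(Path("usr") / lib / ("python" + py_version) / "site-packages"
                          / "{}-{}{}{}".format(name, pkg_version, pre_suffix, suffix))
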

Symlink to ../../../update-test-sources.sh (new file, +1)

@ -0,0 +1 @@
../../../update-test-sources.sh

tests/test_scripts_pythondistdeps.py (new file, +242)

@ -0,0 +1,242 @@
# Run tests using pytest, e.g. from the root directory
# $ python3 -m pytest --ignore tests/testing/ -vvv
#
# If there are any breakages, the best way to see differences is using a diff:
# $ diff tests/data/scripts_pythondistdeps/test-data.yaml <(python3 tests/test_scripts_pythondistdeps.py)
#
# - Test cases and expected results are saved in test-data.yaml inside
# TEST_DATA_PATH (currently ./data/scripts_pythondistdeps/)
# - To regenerate test-data.yaml file with the current results of
# pythondistdeps.py for each test configuration, execute this test file
# directly and results will be on stdout
# $ python3 test_scripts_pythondistdeps.py
#
# To add new test-data, add them to the test-requires.yaml: they will be
# downloaded automatically. And then add the resulting dist-info/egg-info paths
# into test-data.yaml under whichever requires/provides configurations you want
# to test
# - To find all dist-info/egg-info directories in the test-data directory,
# run inside test-data:
# $ find . -type d -regex ".*\(dist-info\|egg-info\)" | sort
#
# Requirements for this script:
# - Python >= 3.6
# - pip >= 20.0.1
# - setuptools
# - pytest
# - pyyaml
# - wheel
from pathlib import Path
import pytest
import shlex
import shutil
import subprocess
import sys
import tempfile
import yaml
PYTHONDISTDEPS_PATH = Path(__file__).parent / '..' / 'pythondistdeps.py'
TEST_DATA_PATH = Path(__file__).parent / 'data' / 'scripts_pythondistdeps'
def run_pythondistdeps(provides_params, requires_params, dist_egg_info_path):
"""Runs pythondistdeps.py on `dist_egg_info_path` with the given
provides and requires parameters and returns a dict with the generated provides and requires"""
info_path = TEST_DATA_PATH / dist_egg_info_path
files = '\n'.join(map(str, info_path.iterdir()))
provides = subprocess.check_output((sys.executable, PYTHONDISTDEPS_PATH, *shlex.split(provides_params)),
input=files, encoding="utf-8")
requires = subprocess.check_output((sys.executable, PYTHONDISTDEPS_PATH, *shlex.split(requires_params)),
input=files, encoding="utf-8")
return {"provides": provides.strip(), "requires": requires.strip()}
def load_test_data():
"""Reads the test-data.yaml and loads the test data into a dict."""
with TEST_DATA_PATH.joinpath('test-data.yaml').open() as file:
return yaml.safe_load(file)
def generate_test_cases(test_data):
"""Goes through the test data dict and yields test cases.
Test case is a tuple of 4 elements:
- provides parameters
- requires parameters
- path to the dist-info/egg-info directory inside test-data
- dict with expected results ("requires" and "provides")"""
for requires_params in test_data:
for provides_params in test_data[requires_params]:
for dist_egg_info_path in test_data[requires_params][provides_params]:
expected = test_data[requires_params][provides_params][dist_egg_info_path]
yield (provides_params, requires_params, dist_egg_info_path, expected)
def check_and_install_test_data():
"""Checks if the appropriate metadata are present in TEST_DATA_PATH, and if
not, downloads them through pip from PyPI."""
with TEST_DATA_PATH.joinpath('test-requires.yaml').open() as file:
test_requires = yaml.safe_load(file)
downloaded_anything = False
for package in test_requires:
# To be as close to the real environment, we want some packages saved in /usr/lib64 instead of /usr/lib,
# for these we explicitly set lib64 as a parameter, and by default we use /usr/lib.
lib = test_requires[package].pop("lib", "lib")
# type is either `wheel` or `sdist`
for type in test_requires[package]:
for pkg_version in test_requires[package][type]:
for py_version in test_requires[package][type][pkg_version]:
py_version_nodots = py_version.replace(".", "")
package_underscores = package.replace("-", "_")
suffix = ".egg-info" if type == "sdist" else ".dist-info"
pre_suffix = f"-py{py_version}" if type == "sdist" else ""
install_path = TEST_DATA_PATH / "usr" / lib / f"python{py_version}" \
/ "site-packages" / f"{package_underscores}-{pkg_version}{pre_suffix}{suffix}"
if install_path.exists():
continue
# If this is the first package we're downloading,
# display what's happening
if not downloaded_anything:
print("=====================")
print("Downloading test data")
print("=====================\n")
downloaded_anything = True
# We use a temporary directory to unpack/install the
# package to, and then we move only the metadata to the
# final location
with tempfile.TemporaryDirectory() as temp_dir:
import runpy
backup_argv = sys.argv[:]
if type == "wheel":
from pkg_resources import parse_version
abi = f"cp{py_version_nodots}"
# The "m" was removed from the abi flag in Python version 3.8
if parse_version(py_version) < parse_version('3.8'):
abi += "m"
# Install = download and unpack wheel into our
# temporary directory
sys.argv[1:] = ["install", "--no-deps",
"--only-binary", ":all:",
"--platform", "manylinux1_x86_64",
"--python-version", py_version,
"--implementation", "cp",
"--abi", abi,
"--target", temp_dir,
"--no-build-isolation",
f"{package}=={pkg_version}"]
else:
# Download sdist that we'll unpack later
sys.argv[1:] = ["download", "--no-deps",
"--no-binary", ":all:",
"--dest", temp_dir,
"--no-build-isolation",
f"{package}=={pkg_version}"]
try:
# run_module() alters sys.modules and sys.argv, but restores them at exit
runpy.run_module("pip", run_name="__main__", alter_sys=True)
except SystemExit as exc:
pass
finally:
sys.argv[:] = backup_argv
temp_path = Path(temp_dir)
if type == "sdist":
# Wheels were already unpacked by pip; sdists we
# have to unpack ourselves
sdist_path = next(temp_path.glob(f"{package}-{pkg_version}.*"))
if sdist_path.suffix == ".zip":
import zipfile
archive = zipfile.ZipFile(sdist_path)
else:
import tarfile
archive = tarfile.open(sdist_path)
archive.extractall(temp_path)
try:
info_path = next(temp_path.glob(f"**/*{suffix}"))
# Let's check the wheel metadata has the
# expected directory name. We don't check for
# egg-info metadata, because we're pulling them
# from sdists where they don't have the proper
# directory name
if type == "wheel":
if info_path.name != install_path.name:
print("\nWarning: wheel metadata has an unexpected directory name.\n"
f"Expected: {install_path.name}\n"
f"Actual: {info_path.name}\n"
f"Info: package '{package}', version '{pkg_version}'"
f" for Python {py_version}\n"
f"Possible resolution: Specify the package version with"
f" trailing zeros in test-requires.yaml", file=sys.stderr)
shutil.move(info_path, install_path)
relative_path = install_path.relative_to(TEST_DATA_PATH)
print(f"\nDownloaded metadata to '{relative_path}'" \
f" inside test-data directory.\n")
except StopIteration:
# temp_path.glob() did not find any file and
# thus there's been some problem
sys.exit(f"Problem occurred while getting dist-info/egg-info"
f" for package '{package}', version '{pkg_version}'"
f" for Python {py_version}")
if downloaded_anything:
print("\n==============================")
print("Finished downloading test data")
print("==============================")
@pytest.fixture(scope="session", autouse=True)
def fixture_check_and_install_test_data():
"""Wrapper fixture, because a fixture can't be called as a function."""
check_and_install_test_data()
@pytest.mark.parametrize("provides_params, requires_params, dist_egg_info_path, expected",
generate_test_cases(load_test_data()))
def test_pythondistdeps(provides_params, requires_params, dist_egg_info_path, expected):
"""Runs pythondistdeps with the given parameters and dist-info/egg-info
path, compares the results with the expected results"""
assert expected == run_pythondistdeps(provides_params, requires_params, dist_egg_info_path)
if __name__ == "__main__":
"""If the script is called directly, we check and install test data if needed,
we look up all the test configurations in test-data.yaml, run
pythondistdeps for each, save the results and print the resulting YAML file
with the updated results."""
check_and_install_test_data()
# Set YAML dump style to block style
def str_presenter(dumper, data):
if len(data.splitlines()) > 1: # check for multiline string
return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
return dumper.represent_scalar('tag:yaml.org,2002:str', data)
yaml.add_representer(str, str_presenter)
# Run pythondistdeps for each test configuration
test_data = load_test_data()
for provides_params, requires_params, dist_egg_info_path, expected in generate_test_cases(test_data):
# Print a dot to stderr for each test run to keep user informed about progress
print(".", end="", flush=True, file=sys.stderr)
test_data[requires_params][provides_params][dist_egg_info_path] = \
run_pythondistdeps(provides_params, requires_params, dist_egg_info_path)
print(yaml.dump(test_data, indent=4))
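
For ad-hoc debugging, the helper functions can also be called directly instead of going through pytest; a sketch run from the tests/ directory (the parameter strings and path are illustrative, not taken from test-data.yaml):

    from test_scripts_pythondistdeps import check_and_install_test_data, run_pythondistdeps

    check_and_install_test_data()  # downloads the metadata on first use
    result = run_pythondistdeps(
        provides_params="--provides --majorver-provides",
        requires_params="--requires",
        dist_egg_info_path="usr/lib/python3.9/site-packages/six-1.12.0.dist-info")
    print(result["provides"])
    print(result["requires"])
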

tests/tests.yml

@ -22,3 +22,26 @@
     required_packages:
     - rpm-build
     - python3-devel
+
+- hosts: localhost
+  pre_tasks:
+  - import_role:
+      name: standard-test-source
+    vars:
+      fetch_only: True
+      srcdir: ./tests/data/scripts_pythondistdeps/
+  roles:
+  - role: standard-test-basic
+    tags:
+    - classic
+    tests:
+    - pythondistdeps:
+        dir: .
+        # Use update-test-sources.sh to update the test data
+        run: python3 -m pytest --capture=no -vvv
+    required_packages:
+    - python3-pip
+    - python3-pytest
+    - python3-pyyaml
+    - python3-setuptools
+    - python3-wheel

update-test-sources.sh (new executable file, +19)

@ -0,0 +1,19 @@
#!/bin/bash
#
# Requirements:
# - pip >= 20.0.1
#
# First prune old test data
rm -rf ./tests/data/scripts_pythondistdeps/usr
# Then run the test suite; it will download the test data again
python3 -m pytest --capture=no -vvv
# Archive the test data into a file with today's date
archive=test-sources-$(date +%Y-%m-%d).tar.gz
tar -zcvf ${archive} -C ./tests/data/scripts_pythondistdeps/ usr
# Now manually run:
# $ fedpkg new-sources ${archive}