#!/usr/bin/python3

# This script looks for the 'latest' comps-fXX.xml.in file, assumes it's
# for Rawhide, and looks through its <packagereq> entries for ones that
# specify packages that do not currently exist in Rawhide. It is arch-aware.
# It expects to be run on a Fedora system with network access, as it will
# try to query the 'rawhide' dnf repo to get lists of currently-existing
# packages. You will need fedora-repos-rawhide and python3-lxml.

import glob
import argparse
import subprocess
from collections import defaultdict

import lxml.etree as ET

ARCHES = ('aarch64', 'ppc64le', 's390x', 'x86_64')
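# A package must be missing on every one of these arches to be dropped
# outright; missing on only some of them is reported as an arch-tag problem.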

parser = argparse.ArgumentParser(
    description='Check the comps file for packages that are missing from the '
                'repositories entirely or on some architectures')
parser.add_argument('--update', dest='update', action='store_true', default=False,
                    help='Update the comps file with the changes')
args = parser.parse_args()
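
# Without --update this is a dry run: problems are reported but nothing is
# written back. Typical invocations (the script name is illustrative):
#   python3 comps-cleanup.py
#   python3 comps-cleanup.py --update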

# gather package lists. this eats lots of RAM. I don't care.
pkgs = {}
for arch in ARCHES:
    # check=True: if the dnf query fails, abort rather than treating every
    # package as missing on this arch
    pkgtext = subprocess.run(
        ('dnf', '--forcearch={}'.format(arch), '--disablerepo=*',
         '--enablerepo=rawhide', 'repoquery', '--qf=%{NAME}'),
        capture_output=True, text=True, check=True).stdout
    # a set makes the many membership checks below cheap
    pkgs[arch] = set(pkgtext.splitlines())
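
# Each pkgs[arch] entry is a bare package name (repoquery's %{NAME} tag),
# with no version, release or arch attached.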

# find the *latest* comps file (assume it's rawhide)
compsfiles = glob.glob('comps-f*.xml.in')
# sort numerically on the release number embedded in the name, so that
# e.g. comps-f41.xml.in sorts above comps-f9.xml.in
latest = sorted(compsfiles, key=lambda x: int(''.join(c for c in x if c.isdigit())))[-1]

# find package reqs in comps; lxml keeps XML comments in the tree by
# default, so they survive a parse-and-rewrite round trip
tree = ET.parse(latest)
root = tree.getroot()
pkgreqs = root.findall('.//packagereq')
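
# For reference, a comps group looks roughly like this (ids and names
# illustrative):
#   <group>
#     <id>admin-tools</id>
#     <packagelist>
#       <packagereq type="default" arch="x86_64,s390x">somepackage</packagereq>
#     </packagelist>
#   </group>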

# Check if each package is in the repository for each architecture
removedpkgs = defaultdict(list)  # package name -> groups it was removed from
archpkgs = {}                    # package name -> arches it is present on
for pkgreq in pkgreqs:
    reqtype = pkgreq.get('type', '')
    # lists of arches the package is missing on, and present on
    missing = []
    present = []

    # arches the package is listed for (if no 'arch' key, it's listed for all)
    reqarches = pkgreq.get('arch', '')
    if reqarches:
        reqarches = reqarches.split(',')
    else:
        reqarches = ARCHES
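    # reqarches is now always a sequence of arch names, e.g.
    # ['x86_64', 's390x'] for arch="x86_64,s390x" (values illustrative)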

    # do the actual check, append arch to 'missing' if it's not there
    for arch in reqarches:
        if arch in pkgs and pkgreq.text not in pkgs[arch]:
            missing.append(arch)
        else:
            present.append(arch)

    # the packagereq sits in a packagelist inside a group, so ../../id is
    # the group's id
    grpid = pkgreq.find('./../../id').text
    pkgname = pkgreq.text

    # record the result: a package listed for all arches and present on none
    # gets dropped outright; anything else missing somewhere is an arch-tag
    # problem (unless the entry is optional)
    if missing == list(ARCHES):
        if pkgreq.getparent() is not None:
            removedpkgs[pkgname].append(grpid)
            # actually drop the entry, so the empty-group check below works
            pkgreq.getparent().remove(pkgreq)
    elif missing and reqtype != 'optional':
        archpkgs[pkgname] = ','.join(present)

# Find empty groups after packages not in repositories have been removed
pkglists = root.findall('.//packagelist')
removedgrps = {}
for pkglist in pkglists:
    if not len(pkglist):
        group = pkglist.getparent()
        grpid = group.find('./id').text
        removedgrps[grpid] = []
        group.getparent().remove(group)
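
# Environments and categories refer to groups by id from inside a
# <grouplist>, roughly (ids illustrative):
#   <environment>
#     <id>some-environment</id>
#     <grouplist>
#       <groupid>admin-tools</groupid>
#     </grouplist>
#   </environment>
# so references to the groups just deleted have to be cleaned out of both.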

# Remove any empty groups from the environment lists
envlists = root.findall('.//environment//groupid')
for envgrp in envlists:
    grpid = envgrp.text
    if grpid in removedgrps:
        # The groups are inside a grouplist inside the environment
        par = envgrp.getparent()
        envid = par.getparent().find('./id').text
        removedgrps[grpid].append(envid)
        par.remove(envgrp)

# Remove any empty groups from the category lists
catlists = root.findall('.//category//groupid')
for catgrp in catlists:
    grpid = catgrp.text
    if grpid in removedgrps:
        # The groups are inside a grouplist inside the category
        par = catgrp.getparent()
        catid = par.getparent().find('./id').text
        removedgrps[grpid].append(catid)
        par.remove(catgrp)

# Remove any language packs for packages that don't exist anymore
langpacks = root.find('.//langpacks')
removedlang = []
# list() takes a snapshot, since we remove children while iterating
# (getchildren() is deprecated in lxml)
for lang in list(langpacks):
    pkg = lang.get('name')
    if pkg in removedpkgs:
        removedlang.append(pkg)
        lang.getparent().remove(lang)
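
# For reference, a langpacks entry looks roughly like
#   <match name="aspell" install="aspell-%s"/>
# (names illustrative); only the 'name' attribute matters here.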

# Print out a summary
print('Packages with incorrect architecture tags:')
for pkg in sorted(archpkgs):
    print(' {} only available on {}'.format(pkg, archpkgs[pkg]))

print('\nRemoving packages:')
for pkg in sorted(removedpkgs):
    print(' {} in group {}'.format(pkg, ', '.join(removedpkgs[pkg])))

print('\nRemoving empty groups:')
for group in sorted(removedgrps):
    print(' {} in {}'.format(group, ', '.join(removedgrps[group])))

print('\nRemoving language packs for:')
for lang in removedlang:
    print(' {}'.format(lang))
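
# The summary looks roughly like this (package and group names illustrative):
#   Packages with incorrect architecture tags:
#    somepkg only available on x86_64,aarch64
#
#   Removing packages:
#    otherpkg in group some-group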

# Write out the updated XML file if desired
if args.update:
    tree.write(latest, encoding="UTF-8", xml_declaration=True)
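
# Note: --update rewrites the comps file in place; it is worth reviewing the
# resulting diff before committing, since removing elements can leave
# slightly different whitespace than hand-edited XML.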