Allow reusing pkgset FileCache from old composes.

Signed-off-by: Jan Kaluza <jkaluza@redhat.com>
Jan Kaluza 2018-04-23 07:01:17 +02:00
parent 1e972885f5
commit 9915c7d644
4 changed files with 74 additions and 5 deletions

View File

@@ -350,6 +350,14 @@ class WorkPaths(object):
makedirs(path)
return path
def pkgset_file_cache(self):
"""
Returns the path to the file in which the cached version of
PackageSetBase.file_cache should be stored.
"""
return os.path.join(
self.topdir(arch="global"), "pkgset_file_cache.pickle")
class ComposePaths(object):
def __init__(self, compose):
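
For illustration, a minimal sketch of what the new helper resolves to; the compose object here is hypothetical, and the resulting location matches what the updated tests below expect:

# Hypothetical usage of the new WorkPaths helper.
cache_path = compose.paths.work.pkgset_file_cache()
# -> "<compose topdir>/work/global/pkgset_file_cache.pickle"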

View File

@@ -21,6 +21,7 @@ It automatically finds a signed copies according to *sigkey_ordering*.
import itertools
import os
from six.moves import cPickle as pickle
import kobo.log
import kobo.pkgset
@@ -50,7 +51,19 @@ class ReaderThread(WorkerThread):
rpm_path = self.pool.package_set.get_package_path(item)
if rpm_path is None:
return
rpm_obj = self.pool.package_set.file_cache.add(rpm_path)
# In case we have old file cache data, try to reuse it.
if self.pool.package_set.old_file_cache:
# kobo.pkgset.FileCache does not provide a method to check whether
# an RPM is already cached; its .add() method simply reuses the
# cached RPM data when it is available.
# Therefore we call `old_file_cache.add()`, which either returns the
# cached RPM object quickly or loads it from the filesystem, and then
# add the returned RPM object to the real `file_cache` directly.
rpm_obj = self.pool.package_set.old_file_cache.add(rpm_path)
self.pool.package_set.file_cache[rpm_path] = rpm_obj
else:
rpm_obj = self.pool.package_set.file_cache.add(rpm_path)
self.pool.package_set.rpms_by_arch.setdefault(rpm_obj.arch, []).append(rpm_obj)
if pkg_is_srpm(rpm_obj):
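
A standalone sketch of the reuse pattern described in the comment above, relying only on the kobo.pkgset behaviour visible in this diff (FileCache.add() returns the wrapper object, and the cache supports dict-style assignment); the helper name is hypothetical:

def add_with_reuse(file_cache, old_file_cache, rpm_path):
    # Reuse a previously parsed RPM object from the old cache when possible,
    # falling back to reading the RPM from disk via the new cache.
    if old_file_cache is not None:
        rpm_obj = old_file_cache.add(rpm_path)   # cached object or fresh read
        file_cache[rpm_path] = rpm_obj           # register it in the new cache
    else:
        rpm_obj = file_cache.add(rpm_path)
    return rpm_obj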
@@ -71,6 +84,7 @@ class PackageSetBase(kobo.log.LoggingBase):
allow_invalid_sigkeys=False):
super(PackageSetBase, self).__init__(logger=logger)
self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
self.old_file_cache = None
self.sigkey_ordering = sigkey_ordering or [None]
self.arches = arches
self.rpms_by_arch = {}
@@ -215,6 +229,20 @@ class PackageSetBase(kobo.log.LoggingBase):
rpm_path = rpm_path[len(remove_path_prefix):]
f.write("%s\n" % rpm_path)
def load_old_file_cache(self, file_path):
"""
Loads the cached FileCache stored in pickle format in `file_path`.
"""
with open(file_path, "rb") as f:
self.old_file_cache = pickle.load(f)
def save_file_cache(self, file_path):
"""
Saves the current FileCache using the pickle module to `file_path`.
"""
with open(file_path, 'wb') as f:
pickle.dump(self.file_cache, f)
class FilelistPackageSet(PackageSetBase):
def get_package_path(self, queue_item):
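
A rough round-trip sketch of the two new helpers; the cache locations are hypothetical (in Pungi they come from compose.paths.work.pkgset_file_cache() and the lookup helper added in the next file), and `pkgset` stands for a PackageSetBase instance:

import os

# Hypothetical cache locations, for illustration only.
old_cache_path = "/composes/old/work/global/pkgset_file_cache.pickle"
new_cache_path = "/composes/new/work/global/pkgset_file_cache.pickle"

# Reuse the previous run's pickled cache if it still exists ...
if os.path.exists(old_cache_path):
    pkgset.load_old_file_cache(old_cache_path)

# ... and persist this run's cache for the next compose.
pkgset.save_file_cache(new_cache_path)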

View File

@@ -19,14 +19,14 @@ from six.moves import cPickle as pickle
import json
import re
from itertools import groupby
from kobo.shortcuts import force_list
from kobo.shortcuts import force_list, relative_path
from kobo.rpmlib import make_nvra
import pungi.wrappers.kojiwrapper
from pungi.wrappers.comps import CompsWrapper
import pungi.phases.pkgset.pkgsets
from pungi.arch import get_valid_arches
from pungi.util import is_arch_multilib, retry
from pungi.util import is_arch_multilib, retry, find_old_compose
from pungi import Modulemd
from pungi.phases.pkgset.common import create_arch_repos, create_global_repo, populate_arch_pkgsets
@@ -364,6 +364,30 @@ def _get_modules_from_koji_tags(
compose.log_info("%s" % module_msg)
def _find_old_file_cache_path(compose):
"""
Finds an old compose containing "pkgset_file_cache.pickle" and returns
the path to that file. If no such compose or file is found, returns None.
"""
old_compose_path = find_old_compose(
compose.old_composes,
compose.ci_base.release.short,
compose.ci_base.release.version,
compose.ci_base.release.type_suffix if compose.conf['old_composes_per_release_type'] else None,
compose.ci_base.base_product.short if compose.ci_base.release.is_layered else None,
compose.ci_base.base_product.version if compose.ci_base.release.is_layered else None,
)
if not old_compose_path:
return None
old_file_cache_dir = compose.paths.work.pkgset_file_cache()
rel_dir = relative_path(old_file_cache_dir, compose.topdir.rstrip('/') + '/')
old_file_cache_path = os.path.join(old_compose_path, rel_dir)
if not os.path.exists(old_file_cache_path):
return None
return old_file_cache_path
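
For illustration, a sketch of the path mapping performed above, with hypothetical compose directories, assuming relative_path() returns the given path relative to the supplied root (which is how the code above uses it):

import os
from kobo.shortcuts import relative_path

# Hypothetical compose locations, for illustration only.
topdir = "/composes/Fedora-28-20180423.0"
old_compose_path = "/composes/Fedora-28-20180422.0"
new_cache = os.path.join(topdir, "work/global/pkgset_file_cache.pickle")

rel_dir = relative_path(new_cache, topdir.rstrip('/') + '/')
# rel_dir -> "work/global/pkgset_file_cache.pickle"
old_cache = os.path.join(old_compose_path, rel_dir)
# old_cache -> "/composes/Fedora-28-20180422.0/work/global/pkgset_file_cache.pickle"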
def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
all_arches = set(["src"])
for arch in compose.get_arches():
@@ -466,6 +490,10 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
arches=all_arches)
old_file_cache_path = _find_old_file_cache_path(compose)
if old_file_cache_path:
compose.log_info("Reusing old PKGSET file cache from %s" % old_file_cache_path)
global_pkgset.load_old_file_cache(old_file_cache_path)
# Get package set for each compose tag and merge it to global package
# list. Also prepare per-variant pkgset, because we do not have list
# of binary RPMs in module definition - there is just list of SRPMs.
@@ -477,6 +505,8 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
arches=all_arches, packages=packages_to_gather,
allow_invalid_sigkeys=allow_invalid_sigkeys,
populate_only_packages=populate_only_packages_to_gather)
if old_file_cache_path:
pkgset.load_old_file_cache(old_file_cache_path)
# Create a filename for log with package-to-tag mapping. The tag
# name is included in filename, so any slashes in it are replaced
# with underscores just to be safe.
@@ -505,6 +535,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
global_pkgset.save_file_list(
compose.paths.work.package_list(arch="global"),
remove_path_prefix=path_prefix)
global_pkgset.save_file_cache(compose.paths.work.pkgset_file_cache())
return global_pkgset
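
Putting the pieces together, a condensed and simplified sketch of the reuse flow this change adds around populate_global_pkgset (not the verbatim function body; the per-tag package sets are handled the same way):

def build_global_pkgset_with_cache_reuse(compose, koji_wrapper, all_arches):
    global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
        koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
        arches=all_arches)

    # 1. Look for a pickled file cache in an older compose and load it.
    old_file_cache_path = _find_old_file_cache_path(compose)
    if old_file_cache_path:
        global_pkgset.load_old_file_cache(old_file_cache_path)

    # 2. Populate the package set; ReaderThread reuses cached RPM objects
    #    instead of re-reading every RPM header from disk.
    #    ... populate() calls elided ...

    # 3. Persist the freshly built cache for the next compose run.
    global_pkgset.save_file_cache(compose.paths.work.pkgset_file_cache())
    return global_pkgset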

View File

@@ -116,7 +116,8 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
[mock.call.populate('f25', 123456, inherit=True,
logfile=self.topdir + '/logs/global/packages_from_f25.global.log'),
mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
remove_path_prefix='/prefix')])
remove_path_prefix='/prefix'),
mock.call.save_file_cache(self.topdir + '/work/global/pkgset_file_cache.pickle')])
self.assertItemsEqual(pickle_dumps.call_args_list,
[mock.call(orig_pkgset)])
with open(self.pkgset_path) as f:
@@ -215,7 +216,8 @@ data:
self.assertEqual(
pkgset.mock_calls,
[mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
remove_path_prefix='/prefix')])
remove_path_prefix='/prefix'),
mock.call.save_file_cache(self.topdir + '/work/global/pkgset_file_cache.pickle')])
@mock.patch('six.moves.cPickle.dumps')
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate')