Allow reusing pkgset FileCache from old composes.
Signed-off-by: Jan Kaluza <jkaluza@redhat.com>
This commit is contained in:
parent
1e972885f5
commit
9915c7d644
@ -350,6 +350,14 @@ class WorkPaths(object):
|
|||||||
makedirs(path)
|
makedirs(path)
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
def pkgset_file_cache(self):
    """
    Return the path of the file that holds the pickled
    PackageSetBase.file_cache for the global arch.
    """
    # The cache lives under the "global" work directory so every
    # per-arch phase can find a single shared copy.
    cache_name = "pkgset_file_cache.pickle"
    return os.path.join(self.topdir(arch="global"), cache_name)
|
||||||
|
|
||||||
|
|
||||||
class ComposePaths(object):
|
class ComposePaths(object):
|
||||||
def __init__(self, compose):
|
def __init__(self, compose):
|
||||||
|
@ -21,6 +21,7 @@ It automatically finds a signed copies according to *sigkey_ordering*.
|
|||||||
|
|
||||||
import itertools
|
import itertools
|
||||||
import os
|
import os
|
||||||
|
from six.moves import cPickle as pickle
|
||||||
|
|
||||||
import kobo.log
|
import kobo.log
|
||||||
import kobo.pkgset
|
import kobo.pkgset
|
||||||
@ -50,7 +51,19 @@ class ReaderThread(WorkerThread):
|
|||||||
rpm_path = self.pool.package_set.get_package_path(item)
|
rpm_path = self.pool.package_set.get_package_path(item)
|
||||||
if rpm_path is None:
|
if rpm_path is None:
|
||||||
return
|
return
|
||||||
rpm_obj = self.pool.package_set.file_cache.add(rpm_path)
|
|
||||||
|
# In case we have old file cache data, try to reuse it.
|
||||||
|
if self.pool.package_set.old_file_cache:
|
||||||
|
# The kobo.pkgset.FileCache does not have any method to check if
|
||||||
|
# the RPM is in cache. Instead it just re-uses the cached RPM data,
|
||||||
|
# if available, in .add() method.
|
||||||
|
# Therefore we call `old_file_cache.add()` which either returns the
|
||||||
|
# cached RPM object fast or just loads it from filesystem. We then
|
||||||
|
# add the returned RPM object to real `file_cache` directly.
|
||||||
|
rpm_obj = self.pool.package_set.old_file_cache.add(rpm_path)
|
||||||
|
self.pool.package_set.file_cache[rpm_path] = rpm_obj
|
||||||
|
else:
|
||||||
|
rpm_obj = self.pool.package_set.file_cache.add(rpm_path)
|
||||||
self.pool.package_set.rpms_by_arch.setdefault(rpm_obj.arch, []).append(rpm_obj)
|
self.pool.package_set.rpms_by_arch.setdefault(rpm_obj.arch, []).append(rpm_obj)
|
||||||
|
|
||||||
if pkg_is_srpm(rpm_obj):
|
if pkg_is_srpm(rpm_obj):
|
||||||
@ -71,6 +84,7 @@ class PackageSetBase(kobo.log.LoggingBase):
|
|||||||
allow_invalid_sigkeys=False):
|
allow_invalid_sigkeys=False):
|
||||||
super(PackageSetBase, self).__init__(logger=logger)
|
super(PackageSetBase, self).__init__(logger=logger)
|
||||||
self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
|
self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
|
||||||
|
self.old_file_cache = None
|
||||||
self.sigkey_ordering = sigkey_ordering or [None]
|
self.sigkey_ordering = sigkey_ordering or [None]
|
||||||
self.arches = arches
|
self.arches = arches
|
||||||
self.rpms_by_arch = {}
|
self.rpms_by_arch = {}
|
||||||
@ -215,6 +229,20 @@ class PackageSetBase(kobo.log.LoggingBase):
|
|||||||
rpm_path = rpm_path[len(remove_path_prefix):]
|
rpm_path = rpm_path[len(remove_path_prefix):]
|
||||||
f.write("%s\n" % rpm_path)
|
f.write("%s\n" % rpm_path)
|
||||||
|
|
||||||
|
def load_old_file_cache(self, file_path):
    """
    Load a FileCache previously stored in pickle format at `file_path`
    and keep it in ``self.old_file_cache`` for later reuse.
    """
    with open(file_path, "rb") as cache_file:
        cached = pickle.load(cache_file)
    self.old_file_cache = cached
|
||||||
|
|
||||||
|
def save_file_cache(self, file_path):
    """
    Serialize ``self.file_cache`` with pickle and write it to `file_path`,
    so a later compose can reload it via ``load_old_file_cache``.
    """
    with open(file_path, 'wb') as cache_file:
        pickle.dump(self.file_cache, cache_file)
|
||||||
|
|
||||||
|
|
||||||
class FilelistPackageSet(PackageSetBase):
|
class FilelistPackageSet(PackageSetBase):
|
||||||
def get_package_path(self, queue_item):
|
def get_package_path(self, queue_item):
|
||||||
|
@ -19,14 +19,14 @@ from six.moves import cPickle as pickle
|
|||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
from itertools import groupby
|
from itertools import groupby
|
||||||
from kobo.shortcuts import force_list
|
from kobo.shortcuts import force_list, relative_path
|
||||||
from kobo.rpmlib import make_nvra
|
from kobo.rpmlib import make_nvra
|
||||||
|
|
||||||
import pungi.wrappers.kojiwrapper
|
import pungi.wrappers.kojiwrapper
|
||||||
from pungi.wrappers.comps import CompsWrapper
|
from pungi.wrappers.comps import CompsWrapper
|
||||||
import pungi.phases.pkgset.pkgsets
|
import pungi.phases.pkgset.pkgsets
|
||||||
from pungi.arch import get_valid_arches
|
from pungi.arch import get_valid_arches
|
||||||
from pungi.util import is_arch_multilib, retry
|
from pungi.util import is_arch_multilib, retry, find_old_compose
|
||||||
from pungi import Modulemd
|
from pungi import Modulemd
|
||||||
|
|
||||||
from pungi.phases.pkgset.common import create_arch_repos, create_global_repo, populate_arch_pkgsets
|
from pungi.phases.pkgset.common import create_arch_repos, create_global_repo, populate_arch_pkgsets
|
||||||
@ -364,6 +364,30 @@ def _get_modules_from_koji_tags(
|
|||||||
compose.log_info("%s" % module_msg)
|
compose.log_info("%s" % module_msg)
|
||||||
|
|
||||||
|
|
||||||
|
def _find_old_file_cache_path(compose):
    """
    Locate the previous compose and return the path to its
    "pkgset_file_cache.pickle" file.

    Returns None when no old compose can be found or when the found
    compose does not contain the cache file.
    """
    release = compose.ci_base.release

    # The release type suffix only participates in the lookup when the
    # config asks for per-release-type old composes.
    if compose.conf['old_composes_per_release_type']:
        type_suffix = release.type_suffix
    else:
        type_suffix = None

    # Base-product fields are only meaningful for layered products.
    if release.is_layered:
        base_product_short = compose.ci_base.base_product.short
        base_product_version = compose.ci_base.base_product.version
    else:
        base_product_short = None
        base_product_version = None

    old_compose_path = find_old_compose(
        compose.old_composes,
        release.short,
        release.version,
        type_suffix,
        base_product_short,
        base_product_version,
    )
    if not old_compose_path:
        return None

    # Translate this compose's cache location into a path relative to the
    # compose topdir, then look for the same relative path in the old one.
    cache_dir = compose.paths.work.pkgset_file_cache()
    rel_dir = relative_path(cache_dir, compose.topdir.rstrip('/') + '/')
    candidate = os.path.join(old_compose_path, rel_dir)
    if not os.path.exists(candidate):
        return None
    return candidate
|
||||||
|
|
||||||
|
|
||||||
def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
|
def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
|
||||||
all_arches = set(["src"])
|
all_arches = set(["src"])
|
||||||
for arch in compose.get_arches():
|
for arch in compose.get_arches():
|
||||||
@ -466,6 +490,10 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
|
|||||||
global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
|
global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
|
||||||
koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
|
koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
|
||||||
arches=all_arches)
|
arches=all_arches)
|
||||||
|
old_file_cache_path = _find_old_file_cache_path(compose)
|
||||||
|
if old_file_cache_path:
|
||||||
|
compose.log_info("Reusing old PKGSET file cache from %s" % old_file_cache_path)
|
||||||
|
global_pkgset.load_old_file_cache(old_file_cache_path)
|
||||||
# Get package set for each compose tag and merge it to global package
|
# Get package set for each compose tag and merge it to global package
|
||||||
# list. Also prepare per-variant pkgset, because we do not have list
|
# list. Also prepare per-variant pkgset, because we do not have list
|
||||||
# of binary RPMs in module definition - there is just list of SRPMs.
|
# of binary RPMs in module definition - there is just list of SRPMs.
|
||||||
@ -477,6 +505,8 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
|
|||||||
arches=all_arches, packages=packages_to_gather,
|
arches=all_arches, packages=packages_to_gather,
|
||||||
allow_invalid_sigkeys=allow_invalid_sigkeys,
|
allow_invalid_sigkeys=allow_invalid_sigkeys,
|
||||||
populate_only_packages=populate_only_packages_to_gather)
|
populate_only_packages=populate_only_packages_to_gather)
|
||||||
|
if old_file_cache_path:
|
||||||
|
pkgset.load_old_file_cache(old_file_cache_path)
|
||||||
# Create a filename for log with package-to-tag mapping. The tag
|
# Create a filename for log with package-to-tag mapping. The tag
|
||||||
# name is included in filename, so any slashes in it are replaced
|
# name is included in filename, so any slashes in it are replaced
|
||||||
# with underscores just to be safe.
|
# with underscores just to be safe.
|
||||||
@ -505,6 +535,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
|
|||||||
global_pkgset.save_file_list(
|
global_pkgset.save_file_list(
|
||||||
compose.paths.work.package_list(arch="global"),
|
compose.paths.work.package_list(arch="global"),
|
||||||
remove_path_prefix=path_prefix)
|
remove_path_prefix=path_prefix)
|
||||||
|
global_pkgset.save_file_cache(compose.paths.work.pkgset_file_cache())
|
||||||
return global_pkgset
|
return global_pkgset
|
||||||
|
|
||||||
|
|
||||||
|
@ -116,7 +116,8 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
|
|||||||
[mock.call.populate('f25', 123456, inherit=True,
|
[mock.call.populate('f25', 123456, inherit=True,
|
||||||
logfile=self.topdir + '/logs/global/packages_from_f25.global.log'),
|
logfile=self.topdir + '/logs/global/packages_from_f25.global.log'),
|
||||||
mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
|
mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
|
||||||
remove_path_prefix='/prefix')])
|
remove_path_prefix='/prefix'),
|
||||||
|
mock.call.save_file_cache(self.topdir + '/work/global/pkgset_file_cache.pickle')])
|
||||||
self.assertItemsEqual(pickle_dumps.call_args_list,
|
self.assertItemsEqual(pickle_dumps.call_args_list,
|
||||||
[mock.call(orig_pkgset)])
|
[mock.call(orig_pkgset)])
|
||||||
with open(self.pkgset_path) as f:
|
with open(self.pkgset_path) as f:
|
||||||
@ -215,7 +216,8 @@ data:
|
|||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
pkgset.mock_calls,
|
pkgset.mock_calls,
|
||||||
[mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
|
[mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf',
|
||||||
remove_path_prefix='/prefix')])
|
remove_path_prefix='/prefix'),
|
||||||
|
mock.call.save_file_cache(self.topdir + '/work/global/pkgset_file_cache.pickle')])
|
||||||
|
|
||||||
@mock.patch('six.moves.cPickle.dumps')
|
@mock.patch('six.moves.cPickle.dumps')
|
||||||
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate')
|
@mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate')
|
||||||
|
Loading…
Reference in New Issue
Block a user