Use dogpile.cache to cache the listTaggedRPMS calls if possible

If the same tag is queried with the same event, Pungi can cache the
response and avoid calling the API again. Particularly for small composes
this can save a significant amount of time.

Merges: https://pagure.io/pungi/pull-request/1022
Signed-off-by: Jan Kaluza <jkaluza@redhat.com>
Jan Kaluza 2018-08-15 09:20:40 +02:00 committed by Lubomír Sedlář
parent 92968fe52d
commit d2f392fac8
10 changed files with 119 additions and 4 deletions

View File

@@ -1674,3 +1674,29 @@ Miscellaneous Settings
(*str*) -- If set, the ISO files from ``buildinstall``, ``createiso`` and
``live_images`` phases will be put into this destination, and a symlink
pointing to this location will be created in actual compose directory.
**dogpile_cache_backend**
(*str*) -- If set, Pungi will use the configured Dogpile cache backend to
cache various data between multiple Pungi calls. This can make Pungi
faster when multiple similar composes are run regularly within a short
time. For a list of available backends, see the documentation at
https://dogpilecache.readthedocs.io.
The most typical configuration uses the ``dogpile.cache.dbm`` backend.
**dogpile_cache_arguments**
(*dict*) -- Arguments to be used when creating the Dogpile cache backend.
See the particular backend's configuration for the list of possible
key/value pairs.
For the ``dogpile.cache.dbm`` backend, the value can be, for example,
the following: ::
{
"filename": "/tmp/pungi_cache_file.dbm"
}
**dogpile_cache_expiration_time**
(*int*) -- Defines the default expiration time in seconds of data stored
in the Dogpile cache. Defaults to 3600 seconds.
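A minimal configuration enabling the cache with the ``dogpile.cache.dbm``
backend might look like the following (the cache file path is only an
example): ::

    dogpile_cache_backend = "dogpile.cache.dbm"
    dogpile_cache_arguments = {
        "filename": "/tmp/pungi_cache_file.dbm"
    }
    dogpile_cache_expiration_time = 3600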

View File

@@ -25,6 +25,7 @@ These packages will have to be installed:
* libmodulemd
* libselinux-python
* lorax
* python-dogpile-cache
* python-jsonschema
* python-kickstart
* python-libcomps
@@ -60,7 +61,7 @@ packages above as they are used by calling an executable. ::
$ for pkg in _deltarpm krbV _selinux deltarpm sqlitecachec _sqlitecache; do ln -vs "$(deactivate && python -c 'import os, '$pkg'; print('$pkg'.__file__)')" "$(virtualenvwrapper_get_site_packages_dir)"; done
$ pip install -U pip
$ PYCURL_SSL_LIBRARY=nss pip install pycurl --no-binary :all:
$ pip install beanbag jsonschema 'kobo>=0.6.0' lockfile lxml mock nose nose-cov productmd pyopenssl python-multilib requests requests-kerberos setuptools sphinx ordered_set koji PyYAML dogpile.cache
Now you should be able to run all existing tests.

View File

@@ -20,6 +20,7 @@ BuildRequires: python2-multilib
BuildRequires: python2-libcomps
BuildRequires: python2-six
BuildRequires: python2-multilib
BuildRequires: python2-dogpile-cache
Requires: createrepo >= 0.4.11
Requires: yum => 3.4.3-28
@@ -49,6 +50,7 @@ Requires: python2-dnf
Requires: python2-multilib
Requires: python2-libcomps
Requires: python2-six
Requires: python2-dogpile-cache
BuildArch: noarch

View File

@@ -711,6 +711,12 @@ def make_schema():
"default": False
},
"symlink_isos_to": {"type": "string"},
"dogpile_cache_backend": {"type": "string"},
"dogpile_cache_expiration_time": {"type": "number"},
"dogpile_cache_arguments": {
"type": "object",
"default": {},
},
"createiso_skip": _variant_arch_mapping({"type": "boolean"}), "createiso_skip": _variant_arch_mapping({"type": "boolean"}),
"createiso_break_hardlinks": { "createiso_break_hardlinks": {
"type": "boolean", "type": "boolean",

View File

@@ -29,6 +29,8 @@ import json
import kobo.log
from productmd.composeinfo import ComposeInfo
from productmd.images import Images
from dogpile.cache import make_region
from pungi.wrappers.variants import VariantsXmlParser
from pungi.paths import Paths
@@ -156,6 +158,15 @@ class Compose(kobo.log.LoggingBase):
self.attempted_deliverables = {}
self.required_deliverables = {}
if self.conf.get("dogpile_cache_backend", None):
self.cache_region = make_region().configure(
self.conf.get("dogpile_cache_backend"),
expiration_time=self.conf.get("dogpile_cache_expiration_time", 3600),
arguments=self.conf.get("dogpile_cache_arguments", {})
)
else:
self.cache_region = make_region().configure('dogpile.cache.null')
get_compose_dir = staticmethod(get_compose_dir)
def __getitem__(self, name):
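The fallback to ``dogpile.cache.null`` means ``compose.cache_region`` is
always a usable region object: the null backend never stores anything and
every lookup misses. A small standalone sketch of that behaviour (not part
of the commit):

    from dogpile.cache import make_region
    from dogpile.cache.api import NO_VALUE

    region = make_region().configure("dogpile.cache.null")
    region.set("some_key", 123)                  # stored nowhere
    print(region.get("some_key") is NO_VALUE)    # True - always a cache miss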

View File

@@ -275,7 +275,7 @@ class FilelistPackageSet(PackageSetBase):
class KojiPackageSet(PackageSetBase):
def __init__(self, koji_wrapper, sigkey_ordering, arches=None, logger=None,
packages=None, allow_invalid_sigkeys=False,
populate_only_packages=False, cache_region=None):
"""
Creates new KojiPackageSet.
@@ -298,6 +298,10 @@ class KojiPackageSet(PackageSetBase):
when generating compose from predefined list of packages from big
Koji tag.
When False, all packages from Koji tag are added to KojiPackageSet.
:param dogpile.cache.CacheRegion cache_region: If set, the CacheRegion
will be used to cache the list of RPMs per Koji tag, so next calls
of the KojiPackageSet.populate(...) method won't try fetching it
again.
""" """
super(KojiPackageSet, self).__init__(sigkey_ordering=sigkey_ordering, super(KojiPackageSet, self).__init__(sigkey_ordering=sigkey_ordering,
arches=arches, logger=logger, arches=arches, logger=logger,
@ -306,12 +310,14 @@ class KojiPackageSet(PackageSetBase):
# Names of packages to look for in the Koji tag. # Names of packages to look for in the Koji tag.
self.packages = set(packages or []) self.packages = set(packages or [])
self.populate_only_packages = populate_only_packages self.populate_only_packages = populate_only_packages
self.cache_region = cache_region
def __getstate__(self):
result = self.__dict__.copy()
result["koji_profile"] = self.koji_wrapper.profile
del result["koji_wrapper"]
del result["_logger"]
del result["cache_region"]
return result
def __setstate__(self, data):
@@ -325,7 +331,20 @@ class KojiPackageSet(PackageSetBase):
return self.koji_wrapper.koji_proxy
def get_latest_rpms(self, tag, event, inherit=True):
if self.cache_region:
cache_key = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % (
tag, str(event), str(inherit))
cached_response = self.cache_region.get(cache_key)
if cached_response:
return cached_response
else:
response = self.koji_proxy.listTaggedRPMS(
tag, event=event, inherit=inherit, latest=True)
self.cache_region.set(cache_key, response)
return response
else:
return self.koji_proxy.listTaggedRPMS(
tag, event=event, inherit=inherit, latest=True)
def get_package_path(self, queue_item):
rpm_info, build_info = queue_item
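The cache key embeds the tag, event and inherit flag, so a repeated query
for the same tag and event is served from the cache, while a different event
goes back to Koji. A rough illustration using the in-memory backend and fake
data (not part of the commit):

    from dogpile.cache import make_region

    region = make_region().configure("dogpile.cache.memory")

    key = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % ("f25", "1000", "True")
    other = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % ("f25", "2000", "True")

    region.set(key, ["bash-4.3.42-4.fc24.x86_64"])   # fake listTaggedRPMS result
    print(bool(region.get(key)))     # True  - same tag and event, cache hit
    print(bool(region.get(other)))   # False - different event, cache miss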

View File

@@ -560,7 +560,8 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event_id):
koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
arches=all_arches, packages=packages_to_gather,
allow_invalid_sigkeys=allow_invalid_sigkeys,
populate_only_packages=populate_only_packages_to_gather,
cache_region=compose.cache_region)
if old_file_cache_path:
pkgset.load_old_file_cache(old_file_cache_path)
# Create a filename for log with package-to-tag mapping. The tag

View File

@@ -62,6 +62,7 @@ setup(
"lxml",
"productmd",
"six",
'dogpile.cache',
],
tests_require = [
"mock",

View File

@@ -152,6 +152,7 @@ class DummyCompose(object):
self.fail_deliverable = mock.Mock()
self.require_deliverable = mock.Mock()
self.should_create_yum_database = True
self.cache_region = None
self.DEBUG = False

View File

@@ -11,6 +11,7 @@ except ImportError:
import json
import tempfile
import re
from dogpile.cache import make_region
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
@@ -322,6 +323,52 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
'i686': ['rpms/bash@4.3.42@4.fc24@i686'],
'x86_64': ['rpms/bash@4.3.42@4.fc24@x86_64']})
def test_get_latest_rpms_cache(self):
self._touch_files([
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
cache_region = make_region().configure("dogpile.cache.memory")
pkgset = pkgsets.KojiPackageSet(self.koji_wrapper, [None], arches=['x86_64'],
cache_region=cache_region)
# Try calling populate twice, but expect just a single listTaggedRPMS
# call - that means the caching worked.
for i in range(2):
result = pkgset.populate('f25')
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
[mock.call.listTaggedRPMS('f25', event=None, inherit=True, latest=True)])
self.assertPkgsetEqual(
result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@x86_64']})
def test_get_latest_rpms_cache_different_id(self):
self._touch_files([
'rpms/bash@4.3.42@4.fc24@x86_64',
'rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
])
cache_region = make_region().configure("dogpile.cache.memory")
pkgset = pkgsets.KojiPackageSet(self.koji_wrapper, [None], arches=['x86_64'],
cache_region=cache_region)
# Try calling populate twice with a different event id each time. Each
# call must miss the cache, because the event id is part of the cache key.
expected_calls = []
for i in range(2):
expected_calls.append(
mock.call.listTaggedRPMS('f25', event=i, inherit=True, latest=True))
result = pkgset.populate('f25', event={"id": i})
self.assertEqual(
self.koji_wrapper.koji_proxy.mock_calls,
expected_calls)
self.assertPkgsetEqual(
result,
{'x86_64': ['rpms/bash-debuginfo@4.3.42@4.fc24@x86_64',
'rpms/bash@4.3.42@4.fc24@x86_64']})
@mock.patch('kobo.pkgset.FileCache', new=MockFileCache)
class TestMergePackageSets(PkgsetCompareMixin, unittest.TestCase):