pkgset: Create global repo in parallel to merging pkgsets
The actual work of creating the repo is done in a separate createrepo process, so the compose can easily use two threads: one runs createrepo for the global repo while the other merges the per-arch package sets. Signed-off-by: Lubomír Sedlář <lsedlar@redhat.com>
commit bd9a0ceda2
parent dc557b809a
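The commit message describes a simple fork/join overlap: the createrepo command for the global repo is prepared first, then executed on a worker thread while the main thread keeps merging the per-arch package sets, and the thread is joined before anything that needs the global repo runs. Because the heavy lifting happens in the external createrepo process, the Python thread mostly waits on that process, so the GIL is not a bottleneck. A minimal sketch of the pattern, assuming hypothetical helper names (run_createrepo, overlap_global_repo and merge_arch_pkgsets are placeholders, not Pungi APIs):

    import subprocess
    import threading

    def run_createrepo(cmd, log_path):
        # Stand-in for run_create_global_repo(): execute the prepared
        # createrepo command line and send its output to a log file.
        with open(log_path, "w") as log:
            subprocess.check_call(cmd, stdout=log, stderr=subprocess.STDOUT)

    def overlap_global_repo(cmd, log_path, merge_arch_pkgsets):
        # Start createrepo for the global repo on a second thread...
        worker = threading.Thread(target=run_createrepo, args=(cmd, log_path))
        worker.start()
        # ...and merge the per-arch package sets on the main thread meanwhile.
        package_sets = merge_arch_pkgsets()
        # Block until the global repo is ready before moving on.
        worker.join()
        return package_sets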
@@ -38,19 +38,16 @@ def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
     return result
 
 
-def create_global_repo(compose, path_prefix):
+def get_create_global_repo_cmd(compose, path_prefix):
     createrepo_c = compose.conf["createrepo_c"]
     createrepo_checksum = compose.conf["createrepo_checksum"]
     repo = CreaterepoWrapper(createrepo_c=createrepo_c)
     repo_dir_global = compose.paths.work.arch_repo(arch="global")
-    msg = "Running createrepo for the global package set"
 
     if compose.DEBUG and os.path.isdir(os.path.join(repo_dir_global, "repodata")):
-        compose.log_warning("[SKIP ] %s" % msg)
+        compose.log_warning("[SKIP ] Running createrepo for the global package set")
         return
 
-    compose.log_info("[BEGIN] %s" % msg)
-
     # find an old compose suitable for repodata reuse
     old_compose_path = None
     update_md_path = None
@@ -78,6 +75,13 @@ def create_global_repo(compose, path_prefix):
         pkglist=compose.paths.work.package_list(arch="global"), outputdir=repo_dir_global,
         baseurl="file://%s" % path_prefix, workers=compose.conf["createrepo_num_workers"],
         update_md_path=update_md_path, checksum=createrepo_checksum)
+    return cmd
+
+
+def run_create_global_repo(compose, cmd):
+    msg = "Running createrepo for the global package set"
+    compose.log_info("[BEGIN] %s" % msg)
+
     run(cmd, logfile=compose.paths.log.log_file("global", "arch_repo"), show_cmd=True)
     compose.log_info("[DONE ] %s" % msg)
 
@@ -19,6 +19,8 @@ from six.moves import cPickle as pickle
 import json
 import re
 from itertools import groupby
+import threading
+
 from kobo.shortcuts import force_list, relative_path
 from kobo.rpmlib import make_nvra
 
@@ -29,7 +31,10 @@ from pungi.arch import get_valid_arches
 from pungi.util import is_arch_multilib, retry, find_old_compose
 from pungi import Modulemd
 
-from pungi.phases.pkgset.common import create_arch_repos, create_global_repo, populate_arch_pkgsets
+from pungi.phases.pkgset.common import (create_arch_repos,
+                                        populate_arch_pkgsets,
+                                        get_create_global_repo_cmd,
+                                        run_create_global_repo)
 from pungi.phases.gather import get_packages_to_gather
 
 import pungi.phases.pkgset.source
@@ -179,10 +184,16 @@ class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
 def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
     event_info = get_koji_event_info(compose, koji_wrapper)
     pkgset_global = populate_global_pkgset(compose, koji_wrapper, path_prefix, event_info)
+
+    cmd = get_create_global_repo_cmd(compose, path_prefix)
+    t = threading.Thread(target=run_create_global_repo, args=(compose, cmd))
+    t.start()
+
     package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)
     package_sets["global"] = pkgset_global
 
-    create_global_repo(compose, path_prefix)
+    t.join()
 
     for arch in compose.get_arches():
         # TODO: threads? runroot?
         create_arch_repos(compose, arch, path_prefix)
@@ -15,6 +15,7 @@
 
 
 import os
+import threading
 from six.moves import cPickle as pickle
 
 from kobo.shortcuts import run
@@ -24,7 +25,10 @@ from pungi.arch import get_valid_arches
 from pungi.util import makedirs, is_arch_multilib
 from pungi.wrappers.pungi import PungiWrapper
 
-from pungi.phases.pkgset.common import create_global_repo, create_arch_repos, populate_arch_pkgsets
+from pungi.phases.pkgset.common import (run_create_global_repo,
+                                        get_create_global_repo_cmd,
+                                        create_arch_repos,
+                                        populate_arch_pkgsets)
 from pungi.phases.gather import get_prepopulate_packages, get_packages_to_gather
 from pungi.linker import LinkerThread, LinkerPool
 
@@ -112,10 +116,16 @@ def get_pkgset_from_repos(compose):
 
     flist = sorted(set(flist))
     pkgset_global = populate_global_pkgset(compose, flist, path_prefix)
+
+    cmd = get_create_global_repo_cmd(compose, path_prefix)
+    t = threading.Thread(target=run_create_global_repo, args=(compose, cmd))
+    t.start()
+
     # get_extra_packages(compose, pkgset_global)
     package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)
 
-    create_global_repo(compose, path_prefix)
+    t.join()
 
     for arch in compose.get_arches():
         # TODO: threads? runroot?
         create_arch_repos(compose, arch, path_prefix)
@@ -253,10 +253,11 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
         self.koji_wrapper.koji_proxy.getTag.return_value = TAG_INFO
 
     @mock.patch('pungi.phases.pkgset.sources.source_koji.create_arch_repos')
-    @mock.patch('pungi.phases.pkgset.sources.source_koji.create_global_repo')
+    @mock.patch('pungi.phases.pkgset.sources.source_koji.run_create_global_repo')
+    @mock.patch('pungi.phases.pkgset.sources.source_koji.get_create_global_repo_cmd')
     @mock.patch('pungi.phases.pkgset.sources.source_koji.populate_arch_pkgsets')
     @mock.patch('pungi.phases.pkgset.sources.source_koji.populate_global_pkgset')
-    def test_get_package_sets(self, pgp, pap, cgr, car):
+    def test_get_package_sets(self, pgp, pap, gcgrc, rcgr, car):
         expected = {'x86_64': mock.Mock()}
         pap.return_value = expected
         expected['global'] = pgp.return_value
@@ -273,8 +274,10 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
                           EVENT_INFO)])
         self.assertEqual(pap.call_args_list,
                          [mock.call(self.compose, '/prefix', pgp.return_value)])
-        self.assertEqual(cgr.call_args_list,
+        self.assertEqual(gcgrc.call_args_list,
                          [mock.call(self.compose, '/prefix')])
+        self.assertEqual(rcgr.call_args_list,
+                         [mock.call(self.compose, gcgrc.return_value)])
         self.assertItemsEqual(car.call_args_list,
                               [mock.call(self.compose, 'x86_64', '/prefix'),
                                mock.call(self.compose, 'amd64', '/prefix')])