From 6efaae19fd87f6b34d2a066292f41a56c40a603d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lubom=C3=ADr=20Sedl=C3=A1=C5=99?= Date: Wed, 24 Jul 2019 13:36:23 +0200 Subject: [PATCH] Remove debug mode MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This was already discouraged to not be used, and is a bad idea in current setup anyway. Removing this can simplify the code. Signed-off-by: Lubomír Sedlář --- bin/pungi-koji | 7 - pungi/compose.py | 45 ++--- pungi/phases/createrepo.py | 4 - pungi/phases/gather/__init__.py | 36 ++-- pungi/phases/gather/methods/method_deps.py | 9 - pungi/phases/init.py | 64 +++---- pungi/phases/pkgset/common.py | 8 - pungi/phases/pkgset/sources/source_koji.py | 197 ++++++++++---------- pungi/phases/pkgset/sources/source_repos.py | 22 +-- tests/helpers.py | 2 - tests/test_gather_phase.py | 20 -- tests/test_initphase.py | 53 ------ tests/test_pkgset_source_koji.py | 34 ---- 13 files changed, 156 insertions(+), 345 deletions(-) diff --git a/bin/pungi-koji b/bin/pungi-koji index debf4553..d592ed0e 100755 --- a/bin/pungi-koji +++ b/bin/pungi-koji @@ -76,12 +76,6 @@ def main(): action="append", help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.", ) - parser.add_argument( - "--debug-mode", - action="store_true", - default=False, - help="run pungi in DEBUG mode (DANGEROUS!)", - ) parser.add_argument( "--config", help="Config file", @@ -252,7 +246,6 @@ def main(): compose = Compose(conf, topdir=compose_dir, - debug=opts.debug_mode, skip_phases=opts.skip_phase, just_phases=opts.just_phase, old_composes=opts.old_composes, diff --git a/pungi/compose.py b/pungi/compose.py index abfcb81b..4bb77037 100644 --- a/pungi/compose.py +++ b/pungi/compose.py @@ -106,7 +106,7 @@ def get_compose_dir(topdir, conf, compose_type="production", compose_date=None, class Compose(kobo.log.LoggingBase): - def __init__(self, conf, topdir, debug=False, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None, notifier=None): + def __init__(self, conf, topdir, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None, notifier=None): kobo.log.LoggingBase.__init__(self, logger) # TODO: check if minimal conf values are set self.conf = conf @@ -122,9 +122,6 @@ class Compose(kobo.log.LoggingBase): self.koji_event = koji_event or conf.get("koji_event") self.notifier = notifier - # intentionally upper-case (visible in the code) - self.DEBUG = debug - # path definitions self.paths = Paths(self) @@ -142,17 +139,6 @@ class Compose(kobo.log.LoggingBase): self.supported = True self.im = Images() - if self.DEBUG: - try: - self.im.load(self.paths.compose.metadata("images.json")) - except RuntimeError: - pass - # images.json doesn't exists - except IOError: - pass - # images.json is not a valid json file, for example, it's an empty file - except ValueError: - pass self.im.compose.id = self.compose_id self.im.compose.type = self.compose_type self.im.compose.date = self.compose_date @@ -235,23 +221,22 @@ class Compose(kobo.log.LoggingBase): variants_file = self.paths.work.variants_file(arch="global") msg = "Writing variants file: %s" % variants_file - if self.DEBUG and os.path.isfile(variants_file): - self.log_warning("[SKIP ] %s" % msg) + scm_dict = self.conf["variants_file"] + if isinstance(scm_dict, dict): + file_name = os.path.basename(scm_dict["file"]) + if scm_dict["scm"] == "file": + scm_dict["file"] = os.path.join( + self.config_dir, 
os.path.basename(scm_dict["file"]) + ) else: - scm_dict = self.conf["variants_file"] - if isinstance(scm_dict, dict): - file_name = os.path.basename(scm_dict["file"]) - if scm_dict["scm"] == "file": - scm_dict["file"] = os.path.join(self.config_dir, os.path.basename(scm_dict["file"])) - else: - file_name = os.path.basename(scm_dict) - scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict)) + file_name = os.path.basename(scm_dict) + scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict)) - self.log_debug(msg) - tmp_dir = self.mkdtemp(prefix="variants_file_") - get_file_from_scm(scm_dict, tmp_dir, logger=self._logger) - shutil.copy2(os.path.join(tmp_dir, file_name), variants_file) - shutil.rmtree(tmp_dir) + self.log_debug(msg) + tmp_dir = self.mkdtemp(prefix="variants_file_") + get_file_from_scm(scm_dict, tmp_dir, logger=self._logger) + shutil.copy2(os.path.join(tmp_dir, file_name), variants_file) + shutil.rmtree(tmp_dir) tree_arches = self.conf.get("tree_arches", None) tree_variants = self.conf.get("tree_variants", None) diff --git a/pungi/phases/createrepo.py b/pungi/phases/createrepo.py index 060ba82a..0af51ccb 100644 --- a/pungi/phases/createrepo.py +++ b/pungi/phases/createrepo.py @@ -122,10 +122,6 @@ def create_variant_repo(compose, arch, variant, pkg_type, modules_metadata=None) return createrepo_dirs.add(repo_dir) - if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")): - compose.log_warning("[SKIP ] %s" % msg) - return - compose.log_info("[BEGIN] %s" % msg) # We only want delta RPMs for binary repos. diff --git a/pungi/phases/gather/__init__.py b/pungi/phases/gather/__init__.py index d7438a3f..c10f4c8e 100644 --- a/pungi/phases/gather/__init__.py +++ b/pungi/phases/gather/__init__.py @@ -91,13 +91,8 @@ class GatherPhase(PhaseBase): raise ValueError('\n'.join(errors)) def _write_manifest(self): - if self.compose.DEBUG and os.path.isfile(self.manifest_file): - self.compose.log_info( - "Skipping writing RPM manifest, already exists: %s" % self.manifest_file - ) - else: - self.compose.log_info("Writing RPM manifest: %s" % self.manifest_file) - self.manifest.dump(self.manifest_file) + self.compose.log_info("Writing RPM manifest: %s" % self.manifest_file) + self.manifest.dump(self.manifest_file) def run(self): pkg_map = gather_wrapper(self.compose, self.pkgset_phase.package_sets, @@ -517,23 +512,20 @@ def write_prepopulate_file(compose): prepopulate_file = os.path.join(compose.paths.work.topdir(arch="global"), "prepopulate.json") msg = "Writing prepopulate file: %s" % prepopulate_file - if compose.DEBUG and os.path.isfile(prepopulate_file): - compose.log_warning("[SKIP ] %s" % msg) + scm_dict = compose.conf["gather_prepopulate"] + if isinstance(scm_dict, dict): + file_name = os.path.basename(scm_dict["file"]) + if scm_dict["scm"] == "file": + scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"])) else: - scm_dict = compose.conf["gather_prepopulate"] - if isinstance(scm_dict, dict): - file_name = os.path.basename(scm_dict["file"]) - if scm_dict["scm"] == "file": - scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"])) - else: - file_name = os.path.basename(scm_dict) - scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict)) + file_name = os.path.basename(scm_dict) + scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict)) - compose.log_debug(msg) - tmp_dir = compose.mkdtemp(prefix="prepopulate_file_") - get_file_from_scm(scm_dict, tmp_dir, 
logger=compose._logger) - shutil.copy2(os.path.join(tmp_dir, file_name), prepopulate_file) - shutil.rmtree(tmp_dir) + compose.log_debug(msg) + tmp_dir = compose.mkdtemp(prefix="prepopulate_file_") + get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger) + shutil.copy2(os.path.join(tmp_dir, file_name), prepopulate_file) + shutil.rmtree(tmp_dir) def get_prepopulate_packages(compose, arch, variant, include_arch=True): diff --git a/pungi/phases/gather/methods/method_deps.py b/pungi/phases/gather/methods/method_deps.py index e82c9070..618aa3b1 100644 --- a/pungi/phases/gather/methods/method_deps.py +++ b/pungi/phases/gather/methods/method_deps.py @@ -89,10 +89,6 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch, source_name=source_name) msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg) - if compose.DEBUG and os.path.isfile(pungi_cfg): - compose.log_warning("[SKIP ] %s" % msg) - return - compose.log_info(msg) repos = { @@ -160,11 +156,6 @@ def resolve_deps(compose, arch, variant, source_name=None): pungi_log = compose.paths.work.pungi_log(arch, variant, source_name=source_name) msg = "Running pungi (arch: %s, variant: %s)" % (arch, variant) - if compose.DEBUG and os.path.exists(pungi_log): - compose.log_warning("[SKIP ] %s" % msg) - with open(pungi_log, "r") as f: - res, broken_deps, _ = pungi_wrapper.parse_log(f) - return res, broken_deps compose.log_info("[BEGIN] %s" % msg) pungi_conf = compose.paths.work.pungi_conf(arch, variant, source_name=source_name) diff --git a/pungi/phases/init.py b/pungi/phases/init.py index d06a927e..f41da5fd 100644 --- a/pungi/phases/init.py +++ b/pungi/phases/init.py @@ -90,23 +90,20 @@ def write_global_comps(compose): comps_file_global = compose.paths.work.comps(arch="global") msg = "Writing global comps file: %s" % comps_file_global - if compose.DEBUG and os.path.isfile(comps_file_global): - compose.log_warning("[SKIP ] %s" % msg) + scm_dict = compose.conf["comps_file"] + if isinstance(scm_dict, dict): + comps_name = os.path.basename(scm_dict["file"]) + if scm_dict["scm"] == "file": + scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"]) else: - scm_dict = compose.conf["comps_file"] - if isinstance(scm_dict, dict): - comps_name = os.path.basename(scm_dict["file"]) - if scm_dict["scm"] == "file": - scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"]) - else: - comps_name = os.path.basename(scm_dict) - scm_dict = os.path.join(compose.config_dir, scm_dict) + comps_name = os.path.basename(scm_dict) + scm_dict = os.path.join(compose.config_dir, scm_dict) - compose.log_debug(msg) - tmp_dir = compose.mkdtemp(prefix="comps_") - get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger) - shutil.copy2(os.path.join(tmp_dir, comps_name), comps_file_global) - shutil.rmtree(tmp_dir) + compose.log_debug(msg) + tmp_dir = compose.mkdtemp(prefix="comps_") + get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger) + shutil.copy2(os.path.join(tmp_dir, comps_name), comps_file_global) + shutil.rmtree(tmp_dir) return comps_file_global @@ -115,10 +112,6 @@ def write_arch_comps(compose, arch): comps_file_arch = compose.paths.work.comps(arch=arch) msg = "Writing comps file for arch '%s': %s" % (arch, comps_file_arch) - if compose.DEBUG and os.path.isfile(comps_file_arch): - compose.log_warning("[SKIP ] %s" % msg) - return - compose.log_debug(msg) run(["comps_filter", "--arch=%s" % arch, "--no-cleanup", 
"--output=%s" % comps_file_arch, @@ -145,17 +138,6 @@ def write_variant_comps(compose, arch, variant): comps_file = compose.paths.work.comps(arch=arch, variant=variant) msg = "Writing comps file (arch: %s, variant: %s): %s" % (arch, variant, comps_file) - if compose.DEBUG and os.path.isfile(comps_file): - # read display_order and groups for environments (needed for live images) - comps = CompsWrapper(comps_file) - # groups = variant.groups - comps.filter_groups(variant.groups) - if compose.conf["comps_filter_environments"]: - comps.filter_environments(variant.environments) - - compose.log_warning("[SKIP ] %s" % msg) - return - compose.log_debug(msg) cmd = [ "comps_filter", @@ -194,17 +176,17 @@ def create_comps_repo(compose, arch, variant): comps_repo = compose.paths.work.comps_repo(arch=arch, variant=variant) comps_path = compose.paths.work.comps(arch=arch, variant=variant) msg = "Creating comps repo for arch '%s' variant '%s'" % (arch, variant.uid if variant else None) - if compose.DEBUG and os.path.isdir(os.path.join(comps_repo, "repodata")): - compose.log_warning("[SKIP ] %s" % msg) - else: - compose.log_info("[BEGIN] %s" % msg) - cmd = repo.get_createrepo_cmd(comps_repo, database=False, - outputdir=comps_repo, groupfile=comps_path, - checksum=createrepo_checksum) - logfile = 'comps_repo-%s' % variant if variant else 'comps_repo' - run(cmd, logfile=compose.paths.log.log_file(arch, logfile), - show_cmd=True) - compose.log_info("[DONE ] %s" % msg) + + compose.log_info("[BEGIN] %s" % msg) + cmd = repo.get_createrepo_cmd( + comps_repo, database=False, + outputdir=comps_repo, + groupfile=comps_path, + checksum=createrepo_checksum, + ) + logfile = "comps_repo-%s" % variant if variant else "comps_repo" + run(cmd, logfile=compose.paths.log.log_file(arch, logfile), show_cmd=True) + compose.log_info("[DONE ] %s" % msg) def write_module_defaults(compose): diff --git a/pungi/phases/pkgset/common.py b/pungi/phases/pkgset/common.py index 553b605f..84aff998 100644 --- a/pungi/phases/pkgset/common.py +++ b/pungi/phases/pkgset/common.py @@ -48,10 +48,6 @@ def get_create_global_repo_cmd(compose, path_prefix): repo = CreaterepoWrapper(createrepo_c=createrepo_c) repo_dir_global = compose.paths.work.arch_repo(arch="global") - if compose.DEBUG and os.path.isdir(os.path.join(repo_dir_global, "repodata")): - compose.log_warning("[SKIP ] Running createrepo for the global package set") - return - # find an old compose suitable for repodata reuse old_compose_path = None update_md_path = None @@ -108,10 +104,6 @@ def _create_arch_repo(worker_thread, args, task_num): repo_dir = compose.paths.work.arch_repo(arch=arch) msg = "Running createrepo for arch '%s'" % arch - if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")): - compose.log_warning("[SKIP ] %s" % msg) - return - compose.log_info("[BEGIN] %s" % msg) cmd = repo.get_createrepo_cmd(path_prefix, update=True, database=False, skip_stat=True, pkglist=compose.paths.work.package_list(arch=arch), outputdir=repo_dir, diff --git a/pungi/phases/pkgset/sources/source_koji.py b/pungi/phases/pkgset/sources/source_koji.py index 6564963e..51a8f254 100644 --- a/pungi/phases/pkgset/sources/source_koji.py +++ b/pungi/phases/pkgset/sources/source_koji.py @@ -600,104 +600,103 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event): inherit = compose.conf["pkgset_koji_inherit"] inherit_modules = compose.conf["pkgset_koji_inherit_modules"] - global_pkgset_path = os.path.join( - compose.paths.work.topdir(arch="global"), "pkgset_global.pickle") - 
if compose.DEBUG and os.path.isfile(global_pkgset_path): - msg = "Populating the global package set from tag '%s'" % compose_tags - compose.log_warning("[SKIP ] %s" % msg) - with open(global_pkgset_path, "rb") as f: - global_pkgset = pickle.load(f) - else: - global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet( + + global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet( + koji_wrapper, compose.conf["sigkeys"], logger=compose._logger, + arches=all_arches) + + old_file_cache_path = _find_old_file_cache_path(compose) + old_file_cache = None + if old_file_cache_path: + compose.log_info("Reusing old PKGSET file cache from %s" % old_file_cache_path) + old_file_cache = pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache( + old_file_cache_path + ) + global_pkgset.set_old_file_cache(old_file_cache) + + # Get package set for each compose tag and merge it to global package + # list. Also prepare per-variant pkgset, because we do not have list + # of binary RPMs in module definition - there is just list of SRPMs. + for compose_tag in compose_tags: + compose.log_info( + "Populating the global package set from tag '%s'" % compose_tag + ) + if compose_tag in pkgset_koji_tags: + extra_builds = force_list(compose.conf.get("pkgset_koji_builds", [])) + else: + extra_builds = [] + pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet( koji_wrapper, compose.conf["sigkeys"], logger=compose._logger, - arches=all_arches) + arches=all_arches, packages=packages_to_gather, + allow_invalid_sigkeys=allow_invalid_sigkeys, + populate_only_packages=populate_only_packages_to_gather, + cache_region=compose.cache_region, + extra_builds=extra_builds) + if old_file_cache: + pkgset.set_old_file_cache(old_file_cache) + # Create a filename for log with package-to-tag mapping. The tag + # name is included in filename, so any slashes in it are replaced + # with underscores just to be safe. + logfile = compose.paths.log.log_file( + None, "packages_from_%s" % compose_tag.replace("/", "_") + ) + is_traditional = compose_tag in compose.conf.get("pkgset_koji_tag", []) + should_inherit = inherit if is_traditional else inherit_modules - old_file_cache_path = _find_old_file_cache_path(compose) - old_file_cache = None - if old_file_cache_path: - compose.log_info("Reusing old PKGSET file cache from %s" % old_file_cache_path) - old_file_cache = pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache( - old_file_cache_path - ) - global_pkgset.set_old_file_cache(old_file_cache) + # If we're processing a modular tag, we have an exact list of + # packages that will be used. This is basically a workaround for + # tagging working on build level, not rpm level. A module tag may + # build a package but not want it included. This should include + # only packages that are actually in modules. It's possible two + # module builds will use the same tag, particularly a -devel module + # is sharing a tag with its regular version. + # The ultimate goal of the mapping is to avoid a package built in modular + # tag to be used as a dependency of some non-modular package. 
+ modular_packages = set() + for variant in compose.all_variants.values(): + for nsvc, modular_tag in variant.module_uid_to_koji_tag.items(): + if modular_tag != compose_tag: + # Not current tag, skip it + continue + for arch_modules in variant.arch_mmds.values(): + for rpm_nevra in arch_modules[nsvc].get_rpm_artifacts(): + nevra = parse_nvra(rpm_nevra) + modular_packages.add((nevra["name"], nevra["arch"])) - # Get package set for each compose tag and merge it to global package - # list. Also prepare per-variant pkgset, because we do not have list - # of binary RPMs in module definition - there is just list of SRPMs. - for compose_tag in compose_tags: - compose.log_info("Populating the global package set from tag " - "'%s'" % compose_tag) - if compose_tag in pkgset_koji_tags: - extra_builds = force_list(compose.conf.get("pkgset_koji_builds", [])) - else: - extra_builds = [] - pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet( - koji_wrapper, compose.conf["sigkeys"], logger=compose._logger, - arches=all_arches, packages=packages_to_gather, - allow_invalid_sigkeys=allow_invalid_sigkeys, - populate_only_packages=populate_only_packages_to_gather, - cache_region=compose.cache_region, - extra_builds=extra_builds) - if old_file_cache: - pkgset.set_old_file_cache(old_file_cache) - # Create a filename for log with package-to-tag mapping. The tag - # name is included in filename, so any slashes in it are replaced - # with underscores just to be safe. - logfile = compose.paths.log.log_file( - None, 'packages_from_%s' % compose_tag.replace('/', '_')) - is_traditional = compose_tag in compose.conf.get('pkgset_koji_tag', []) - should_inherit = inherit if is_traditional else inherit_modules + pkgset.populate( + compose_tag, + event, + inherit=should_inherit, + logfile=logfile, + include_packages=modular_packages, + ) + for variant in compose.all_variants.values(): + if compose_tag in variant_tags[variant]: - # If we're processing a modular tag, we have an exact list of - # packages that will be used. This is basically a workaround for - # tagging working on build level, not rpm level. A module tag may - # build a package but not want it included. This should include - # only packages that are actually in modules. It's possible two - # module builds will use the same tag, particularly a -devel module - # is sharing a tag with its regular version. - # The ultimate goal of the mapping is to avoid a package built in modular - # tag to be used as a dependency of some non-modular package. - modular_packages = set() - for variant in compose.all_variants.values(): - for nsvc, modular_tag in variant.module_uid_to_koji_tag.items(): - if modular_tag != compose_tag: - # Not current tag, skip it - continue - for arch_modules in variant.arch_mmds.values(): - for rpm_nevra in arch_modules[nsvc].get_rpm_artifacts(): - nevra = parse_nvra(rpm_nevra) - modular_packages.add((nevra["name"], nevra["arch"])) + # If it's a modular tag, store the package set for the module. + for nsvc, koji_tag in variant.module_uid_to_koji_tag.items(): + if compose_tag == koji_tag: + variant.nsvc_to_pkgset[nsvc] = pkgset - pkgset.populate( - compose_tag, - event, - inherit=should_inherit, - logfile=logfile, - include_packages=modular_packages, - ) - for variant in compose.all_variants.values(): - if compose_tag in variant_tags[variant]: + # Optimization for case where we have just single compose + # tag - we do not have to merge in this case... 
+ if len(variant_tags[variant]) == 1: + variant.pkgset = pkgset + else: + variant.pkgset.fast_merge(pkgset) + # Optimization for case where we have just single compose + # tag - we do not have to merge in this case... + if len(compose_tags) == 1: + global_pkgset = pkgset + else: + global_pkgset.fast_merge(pkgset) - # If it's a modular tag, store the package set for the module. - for nsvc, koji_tag in variant.module_uid_to_koji_tag.items(): - if compose_tag == koji_tag: - variant.nsvc_to_pkgset[nsvc] = pkgset - - # Optimization for case where we have just single compose - # tag - we do not have to merge in this case... - if len(variant_tags[variant]) == 1: - variant.pkgset = pkgset - else: - variant.pkgset.fast_merge(pkgset) - # Optimization for case where we have just single compose - # tag - we do not have to merge in this case... - if len(compose_tags) == 1: - global_pkgset = pkgset - else: - global_pkgset.fast_merge(pkgset) - with open(global_pkgset_path, 'wb') as f: - data = pickle.dumps(global_pkgset, protocol=pickle.HIGHEST_PROTOCOL) - f.write(data) + global_pkgset_path = os.path.join( + compose.paths.work.topdir(arch="global"), "pkgset_global.pickle" + ) + with open(global_pkgset_path, "wb") as f: + data = pickle.dumps(global_pkgset, protocol=pickle.HIGHEST_PROTOCOL) + f.write(data) # write global package list global_pkgset.save_file_list( @@ -711,16 +710,12 @@ def get_koji_event_info(compose, koji_wrapper): event_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-event") msg = "Getting koji event" - if compose.DEBUG and os.path.exists(event_file): - compose.log_warning("[SKIP ] %s" % msg) - result = json.load(open(event_file, "r")) + result = get_koji_event_raw(koji_wrapper, compose.koji_event, event_file) + if compose.koji_event: + compose.log_info("Setting koji event to a custom value: %s" % compose.koji_event) else: - result = get_koji_event_raw(koji_wrapper, compose.koji_event, event_file) - if compose.koji_event: - compose.log_info("Setting koji event to a custom value: %s" % compose.koji_event) - else: - compose.log_info(msg) - compose.log_info("Koji event: %s" % result["id"]) + compose.log_info(msg) + compose.log_info("Koji event: %s" % result["id"]) return result diff --git a/pungi/phases/pkgset/sources/source_repos.py b/pungi/phases/pkgset/sources/source_repos.py index 2a491e42..06014b15 100644 --- a/pungi/phases/pkgset/sources/source_repos.py +++ b/pungi/phases/pkgset/sources/source_repos.py @@ -140,16 +140,14 @@ def populate_global_pkgset(compose, file_list, path_prefix): msg = "Populating the global package set from a file list" global_pkgset_path = os.path.join(compose.paths.work.topdir(arch="global"), "packages.pickle") - if compose.DEBUG and os.path.isfile(global_pkgset_path): - compose.log_warning("[SKIP ] %s" % msg) - with open(global_pkgset_path, "rb") as f: - pkgset = pickle.load(f) - else: - compose.log_info(msg) - pkgset = pungi.phases.pkgset.pkgsets.FilelistPackageSet(compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES) - pkgset.populate(file_list) - with open(global_pkgset_path, "wb") as f: - pickle.dump(pkgset, f, protocol=pickle.HIGHEST_PROTOCOL) + + compose.log_info(msg) + pkgset = pungi.phases.pkgset.pkgsets.FilelistPackageSet( + compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES + ) + pkgset.populate(file_list) + with open(global_pkgset_path, "wb") as f: + pickle.dump(pkgset, f, protocol=pickle.HIGHEST_PROTOCOL) # write global package list 
pkgset.save_file_list(compose.paths.work.package_list(arch="global"), remove_path_prefix=path_prefix) @@ -162,10 +160,6 @@ def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, pack pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch) msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg) - if compose.DEBUG and os.path.isfile(pungi_cfg): - compose.log_warning("[SKIP ] %s" % msg) - return - compose.log_info(msg) packages, grps = get_packages_to_gather(compose, arch, variant) diff --git a/tests/helpers.py b/tests/helpers.py index f4afdc10..0e8e433d 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -175,8 +175,6 @@ class DummyCompose(object): self.should_create_yum_database = True self.cache_region = None - self.DEBUG = False - def setup_optional(self): self.all_variants['Server-optional'] = MockVariant( uid='Server-optional', arches=['x86_64'], type='optional') diff --git a/tests/test_gather_phase.py b/tests/test_gather_phase.py index 84ff6f21..cec5fc11 100644 --- a/tests/test_gather_phase.py +++ b/tests/test_gather_phase.py @@ -861,26 +861,6 @@ class TestGatherPhase(helpers.PungiTestCase): self.assertEqual(gather_wrapper.call_args_list, []) self.assertTrue(os.path.isfile(os.path.join(self.topdir, 'compose', 'metadata', 'rpms.json'))) - @mock.patch('pungi.phases.gather.link_files') - @mock.patch('pungi.phases.gather.gather_wrapper') - def test_does_not_write_in_debug_mode(self, gather_wrapper, link_files): - pkgset_phase = mock.Mock() - compose = helpers.DummyCompose(self.topdir, {}) - compose.notifier = mock.Mock() - compose.DEBUG = True - - rpms_file = helpers.touch( - os.path.join(self.topdir, 'compose', 'metadata', 'rpms.json'), "hello" - ) - - phase = gather.GatherPhase(compose, pkgset_phase) - phase.stop() - - self.assertEqual(gather_wrapper.call_args_list, []) - self.assertTrue(os.path.isfile(rpms_file)) - with open(rpms_file) as fh: - self.assertEqual(fh.read(), "hello") - def test_validates_wrong_requiring_variant(self): pkgset_phase = mock.Mock() compose = helpers.DummyCompose( diff --git a/tests/test_initphase.py b/tests/test_initphase.py index aa13f8d3..5a238625 100644 --- a/tests/test_initphase.py +++ b/tests/test_initphase.py @@ -185,16 +185,6 @@ class TestWriteArchComps(PungiTestCase): '--output=%s/work/x86_64/comps/comps-x86_64.xml' % self.topdir, self.topdir + '/work/global/comps/comps-global.xml'])]) - @mock.patch('pungi.phases.init.run') - def test_run_in_debug(self, run): - compose = DummyCompose(self.topdir, {}) - compose.DEBUG = True - touch(self.topdir + '/work/x86_64/comps/comps-x86_64.xml') - - init.write_arch_comps(compose, 'x86_64') - - self.assertEqual(run.mock_calls, []) - class TestCreateCompsRepo(PungiTestCase): @@ -232,33 +222,9 @@ class TestCreateCompsRepo(PungiTestCase): logfile=self.topdir + '/logs/x86_64/comps_repo-Server.x86_64.log', show_cmd=True)]) - @mock.patch('pungi.phases.init.run') - def test_run_in_debug(self, run): - compose = DummyCompose(self.topdir, { - 'createrepo_checksum': 'sha256', - }) - compose.DEBUG = True - os.makedirs(self.topdir + '/work/x86_64/comps_repo/repodata') - - init.create_comps_repo(compose, 'x86_64', None) - - self.assertEqual(run.mock_calls, []) - class TestWriteGlobalComps(PungiTestCase): - @mock.patch('shutil.copy2') - @mock.patch('pungi.phases.init.get_file_from_scm') - def test_run_in_debug(self, get_file, copy2): - compose = DummyCompose(self.topdir, {'comps_file': 'some-file.xml'}) - compose.DEBUG = True - touch(self.topdir + 
'/work/global/comps/comps-global.xml') - - init.write_global_comps(compose) - - self.assertEqual(get_file.mock_calls, []) - self.assertEqual(copy2.mock_calls, []) - @mock.patch('pungi.phases.init.get_file_from_scm') def test_run_local_file(self, get_file): compose = DummyCompose(self.topdir, {'comps_file': 'some-file.xml'}) @@ -417,25 +383,6 @@ class TestWriteVariantComps(PungiTestCase): [mock.call(init.UNMATCHED_GROUP_MSG % ('Server', 'x86_64', 'foo')), mock.call(init.UNMATCHED_GROUP_MSG % ('Server', 'x86_64', 'bar'))]) - @mock.patch('pungi.phases.init.run') - @mock.patch('pungi.phases.init.CompsWrapper') - def test_run_in_debug(self, CompsWrapper, run): - compose = DummyCompose(self.topdir, {}) - compose.DEBUG = True - variant = compose.variants['Server'] - touch(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml') - - init.write_variant_comps(compose, 'x86_64', variant) - - self.assertEqual(run.mock_calls, []) - self.assertEqual(CompsWrapper.call_args_list, - [mock.call(self.topdir + '/work/x86_64/comps/comps-Server.x86_64.xml')]) - comps = CompsWrapper.return_value - self.assertEqual(comps.filter_groups.mock_calls, [mock.call(variant.groups)]) - self.assertEqual(comps.filter_environments.mock_calls, - [mock.call(variant.environments)]) - self.assertEqual(comps.write_comps.mock_calls, []) - class TestGetLookasideGroups(PungiTestCase): def test_toplevel_variant(self): diff --git a/tests/test_pkgset_source_koji.py b/tests/test_pkgset_source_koji.py index f4f1d895..b85948b7 100644 --- a/tests/test_pkgset_source_koji.py +++ b/tests/test_pkgset_source_koji.py @@ -72,19 +72,6 @@ class TestGetKojiEvent(helpers.PungiTestCase): with open(self.event_file) as f: self.assertEqual(json.load(f), EVENT_INFO) - def test_gets_last_event_in_debug_mode(self): - self.compose.DEBUG = True - self.compose.koji_event = None - koji_wrapper = mock.Mock() - helpers.touch(self.event_file, json.dumps(EVENT_INFO)) - - event = source_koji.get_koji_event_info(self.compose, koji_wrapper) - - self.assertEqual(event, EVENT_INFO) - self.assertItemsEqual(koji_wrapper.mock_calls, []) - with open(self.event_file) as f: - self.assertEqual(json.load(f), EVENT_INFO) - class TestPopulateGlobalPkgset(helpers.PungiTestCase): def setUp(self): @@ -180,27 +167,6 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase): with open(self.pkgset_path) as f: self.assertEqual(f.read(), 'DATA') - @mock.patch('six.moves.cPickle.load') - def test_populate_in_debug_mode(self, pickle_load): - helpers.touch(self.pkgset_path, 'DATA') - self.compose.DEBUG = True - - pickle_load.return_value - - with mock.patch('pungi.phases.pkgset.sources.source_koji.open', - mock.mock_open(), create=True) as m: - pkgset = source_koji.populate_global_pkgset( - self.compose, self.koji_wrapper, '/prefix', 123456) - - self.assertEqual(pickle_load.call_args_list, - [mock.call(m.return_value)]) - self.assertIs(pkgset, pickle_load.return_value) - self.assertEqual( - pkgset.mock_calls, - [mock.call.save_file_list(self.topdir + '/work/global/package_list/global.conf', - remove_path_prefix='/prefix'), - mock.call.save_file_cache(self.topdir + '/work/global/pkgset_file_cache.pickle')]) - @mock.patch('six.moves.cPickle.dumps') @mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.populate') @mock.patch('pungi.phases.pkgset.pkgsets.KojiPackageSet.save_file_list')
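
Editor's note, not part of the patch above: a minimal sketch of how a caller constructs a Compose once this change lands. The config path, topdir and the kobo-based config loading are hypothetical placeholders; the only details taken from the diff are the new constructor signature (no debug keyword) and the removal of the --debug-mode flag from pungi-koji, meaning every phase now always runs instead of skipping steps whose output files already exist.

    # Minimal sketch, assuming kobo.conf.PyConfigParser for loading the config;
    # all paths below are hypothetical.
    import kobo.conf

    from pungi.compose import Compose

    conf = kobo.conf.PyConfigParser()
    conf.load_from_file("/etc/pungi/pungi.conf")   # hypothetical config file

    compose = Compose(
        conf,
        topdir="/mnt/compose/topdir",              # hypothetical compose directory
        skip_phases=[],                            # remaining keywords are unchanged
        just_phases=[],
        old_composes=[],
    )
    # Compose(..., debug=True) would now raise TypeError: the keyword was removed,
    # so a compose never resumes from partially written output.

On the command line the equivalent is simply dropping --debug-mode from the pungi-koji invocation.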