Initial code merge for Pungi 4.0.

Daniel Mach 2015-02-10 08:19:34 -05:00
parent f5c6d44000
commit f116d9384f
57 changed files with 8759 additions and 10 deletions

View File

@@ -1,9 +1,6 @@
include Authors
include Changelog
include AUTHORS
include COPYING
include GPL
include TESTING
include ToDo
include pungi.spec
include share/*
include doc/*
recursive-include share *
recursive-include doc *

75
TODO Normal file
View File

@@ -0,0 +1,75 @@
Random thoughts on what needs to be done before Pungi 4.0 is completed.
Define building blocks and their metadata
=========================================
* rpms in yum repos
* comps
* kickstart trees
* isos
  * kickstart trees
  * bootable images
  * readme files
  * license(s)
Compose structure
=================
* topdir
  * work, logs, etc.
  * compose
    * $variant
      * $arch
        * $content_type (rpms, isos, kickstart trees, etc.)
          * actual content
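For illustration, one concrete tree following this layout (all names hypothetical):
topdir/
  work/
  logs/
  compose/
    Server/
      x86_64/
        os/   (rpms + repodata)
        iso/  (installation images)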
Split Pungi into smaller well-defined tools
===========================================
* process initial packages
  * comps
  * json mapping
  * ???
* grab initial package set
  * yum repos
  * koji instance (basically what mash does today)
* resolve deps (gather)
  * self-hosting
  * fulltree
  * multilib
  * langpacks
* create repos
* create install images
  * lorax
  * buildinstall
* create isos
  * isos
  * bootable
  * hybrid
  * implant md5sum
  * jigdo
  * checksums
* run tests
  * just quick sanity tests
* notification
  * email
  * messagebus
Unsorted
========
* run any tasks in koji or local host
* support for non-rpm content? (java artifacts, etc.)
* docs!
* unit tests!
* use productmd for metadata: https://github.com/release-engineering/productmd/
* use next-gen tools: createrepo_c, mergerepo_c, dnf, hawkey, libcomps

340
bin/pungi Executable file
View File

@@ -0,0 +1,340 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import errno
import os
import sys
import optparse
import logging
import locale
import datetime
import getpass
import socket
import json
import pipes
here = sys.path[0]
if here != '/usr/bin':
# Git checkout
sys.path[0] = os.path.dirname(here)
from pungi import __version__
# force C locales
locale.setlocale(locale.LC_ALL, "C")
COMPOSE = None
def main():
global COMPOSE
parser = optparse.OptionParser()
parser.add_option(
"--target-dir",
metavar="PATH",
help="a compose is created under this directory",
)
parser.add_option(
"--label",
help="specify compose label (example: Snapshot-1.0); required for production composes"
)
parser.add_option(
"--no-label",
action="store_true",
default=False,
help="make a production compose without label"
)
parser.add_option(
"--supported",
action="store_true",
default=False,
help="set supported flag on media (automatically on for 'RC-x.y' labels)"
)
parser.add_option(
"--old-composes",
metavar="PATH",
dest="old_composes",
default=[],
action="append",
help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
)
parser.add_option(
"--compose-dir",
metavar="PATH",
help="reuse an existing compose directory (DANGEROUS!)",
)
parser.add_option(
"--debug-mode",
action="store_true",
default=False,
help="run pungi in DEBUG mode (DANGEROUS!)",
)
parser.add_option(
"--config",
help="Config file"
)
parser.add_option(
"--skip-phase",
metavar="PHASE",
action="append",
default=[],
help="skip a compose phase",
)
parser.add_option(
"--just-phase",
metavar="PHASE",
action="append",
default=[],
help="run only a specified compose phase",
)
parser.add_option(
"--nightly",
action="store_const",
const="nightly",
dest="compose_type",
help="make a nightly compose",
)
parser.add_option(
"--test",
action="store_const",
const="test",
dest="compose_type",
help="make a test compose",
)
parser.add_option(
"--koji-event",
metavar="ID",
type="int",
help="specify a koji event for populating package set",
)
parser.add_option(
"--version",
action="store_true",
help="output version information and exit",
)
opts, args = parser.parse_args()
if opts.version:
print("pungi %s" % __version__)
sys.exit(0)
if opts.target_dir and opts.compose_dir:
parser.error("cannot specify --target-dir and --compose-dir at once")
if not opts.target_dir and not opts.compose_dir:
parser.error("please specify a target directory")
if opts.target_dir and not opts.compose_dir:
opts.target_dir = os.path.abspath(opts.target_dir)
if not os.path.isdir(opts.target_dir):
parser.error("The target directory does not exist or is not a directory: %s" % opts.target_dir)
else:
opts.compose_dir = os.path.abspath(opts.compose_dir)
if not os.path.isdir(opts.compose_dir):
parser.error("The compose directory does not exist or is not a directory: %s" % opts.compose_dir)
compose_type = opts.compose_type or "production"
if compose_type == "production" and not opts.label and not opts.no_label:
parser.error("must specify label for a production compose")
if not opts.config:
parser.error("please specify a config")
opts.config = os.path.abspath(opts.config)
# check if all requirements are met
import pungi.checks
if not pungi.checks.check():
sys.exit(1)
import kobo.conf
import kobo.log
import productmd.composeinfo.compose
if opts.label:
try:
productmd.composeinfo.compose.verify_label(opts.label)
except ValueError as ex:
parser.error(str(ex))
from pungi.compose import Compose
logger = logging.Logger("Pungi")
kobo.log.add_stderr_logger(logger)
conf = kobo.conf.PyConfigParser()
conf.load_from_file(opts.config)
if opts.target_dir:
compose_dir = Compose.get_compose_dir(opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label)
else:
compose_dir = opts.compose_dir
compose = Compose(conf, topdir=compose_dir, debug=opts.debug_mode, skip_phases=opts.skip_phase, just_phases=opts.just_phase,
old_composes=opts.old_composes, koji_event=opts.koji_event, supported=opts.supported, logger=logger)
kobo.log.add_file_logger(logger, compose.paths.log.log_file("global", "pungi.log"))
COMPOSE = compose
run_compose(compose)
def run_compose(compose):
import pungi.phases
import pungi.metadata
compose.write_status("STARTED")
compose.log_info("Host: %s" % socket.gethostname())
compose.log_info("User name: %s" % getpass.getuser())
compose.log_info("Working directory: %s" % os.getcwd())
compose.log_info("Command line: %s" % " ".join([pipes.quote(arg) for arg in sys.argv]))
compose.log_info("Compose top directory: %s" % compose.topdir)
compose.read_variants()
# dump the config file
date_str = datetime.datetime.strftime(datetime.datetime.now(), "%F_%X").replace(":", "-")
config_dump = compose.paths.log.log_file("global", "config-dump_%s" % date_str)
open(config_dump, "w").write(json.dumps(compose.conf, sort_keys=True, indent=4))
# initialize all phases
init_phase = pungi.phases.InitPhase(compose)
pkgset_phase = pungi.phases.PkgsetPhase(compose)
createrepo_phase = pungi.phases.CreaterepoPhase(compose)
buildinstall_phase = pungi.phases.BuildinstallPhase(compose)
productimg_phase = pungi.phases.ProductimgPhase(compose, pkgset_phase)
gather_phase = pungi.phases.GatherPhase(compose, pkgset_phase)
extrafiles_phase = pungi.phases.ExtraFilesPhase(compose, pkgset_phase)
createiso_phase = pungi.phases.CreateisoPhase(compose)
liveimages_phase = pungi.phases.LiveImagesPhase(compose)
test_phase = pungi.phases.TestPhase(compose)
# check if all config options are set
errors = []
for phase in (init_phase, pkgset_phase, buildinstall_phase, productimg_phase, gather_phase, createiso_phase, test_phase):
if phase.skip():
continue
try:
phase.validate()
except ValueError as ex:
for i in str(ex).splitlines():
errors.append("%s: %s" % (phase.name.upper(), i))
if errors:
for i in errors:
compose.log_error(i)
print(i)
sys.exit(1)
# INIT phase
init_phase.start()
init_phase.stop()
# PKGSET phase
pkgset_phase.start()
pkgset_phase.stop()
# BUILDINSTALL phase - start
buildinstall_phase.start()
# GATHER phase
gather_phase.start()
gather_phase.stop()
# EXTRA_FILES phase
extrafiles_phase.start()
extrafiles_phase.stop()
# CREATEREPO phase
createrepo_phase.start()
createrepo_phase.stop()
# BUILDINSTALL phase
# must finish before PRODUCTIMG
# must finish before CREATEISO
buildinstall_phase.stop()
if not buildinstall_phase.skip():
buildinstall_phase.copy_files()
# PRODUCTIMG phase
productimg_phase.start()
productimg_phase.stop()
# write treeinfo before ISOs are created
for variant in compose.get_variants():
for arch in variant.arches + ["src"]:
pungi.metadata.write_tree_info(compose, arch, variant)
# write .discinfo and media.repo before ISOs are created
for variant in compose.get_variants(recursive=True):
if variant.type == "addon":
continue
for arch in variant.arches + ["src"]:
timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
pungi.metadata.write_media_repo(compose, arch, variant, timestamp)
# CREATEISO and LIVEIMAGES phases
createiso_phase.start()
liveimages_phase.start()
createiso_phase.stop()
liveimages_phase.stop()
# merge checksum files
for variant in compose.get_variants(types=["variant", "layered-product"]):
for arch in variant.arches + ["src"]:
iso_dir = compose.paths.compose.iso_dir(arch, variant, create_dir=False)
if not iso_dir or not os.path.exists(iso_dir):
continue
for checksum_type in ("md5", "sha1", "sha256"):
checksum_upper = "%sSUM" % checksum_type.upper()
checksums = sorted([i for i in os.listdir(iso_dir) if i.endswith(".%s" % checksum_upper)])
fo = open(os.path.join(iso_dir, checksum_upper), "w")
for i in checksums:
data = open(os.path.join(iso_dir, i), "r").read()
fo.write(data)
fo.close()
pungi.metadata.write_compose_info(compose)
compose.im.dump(compose.paths.compose.metadata("images.json"))
# TEST phase
test_phase.start()
test_phase.stop()
# create a latest symlink
compose_dir = os.path.basename(compose.topdir)
symlink_name = "latest-%s-%s" % (compose.conf["product_short"], ".".join(compose.conf["product_version"].split(".")[:-1]))
if compose.conf["product_is_layered"]:
symlink_name += "-%s-%s" % (compose.conf["base_product_short"], compose.conf["base_product_version"])
symlink = os.path.join(compose.topdir, "..", symlink_name)
try:
os.unlink(symlink)
except OSError as ex:
if ex.errno != errno.ENOENT:
raise
try:
os.symlink(compose_dir, symlink)
except Exception as ex:
print("ERROR: couldn't create latest symlink: %s" % ex)
compose.log_info("Compose finished: %s" % compose.topdir)
compose.write_status("FINISHED")
if __name__ == "__main__":
try:
main()
except (Exception, KeyboardInterrupt) as ex:
if COMPOSE:
tb_path = COMPOSE.paths.log.log_file("global", "traceback")
COMPOSE.log_error("Exception: %s" % ex)
COMPOSE.log_error("Extended traceback in: %s" % tb_path)
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
COMPOSE.write_status("DOOMED")
import kobo.tback
open(tb_path, "w").write(kobo.tback.Traceback().get_traceback())
else:
print("Exception: %s" % ex)
sys.stdout.flush()
sys.stderr.flush()
raise
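For reference, a typical invocation of this script using only the options defined above might look like this (config name and paths are hypothetical):
pungi --config fedora.conf --target-dir /mnt/composes --test --old-composes /mnt/composes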

View File

@@ -45,7 +45,7 @@ def get_valid_multilib_arches(tree_arch):
multilib_arch = get_multilib_arch(yum_arch)
if not multilib_arch:
return []
return [ i for i in rpmUtils.arch.getArchList(multilib_arch) if i not in ("noarch", "src") ]
return [i for i in rpmUtils.arch.getArchList(multilib_arch) if i not in ("noarch", "src")]
def get_valid_arches(tree_arch, multilib=True, add_noarch=True, add_src=False):

123
pungi/checks.py Normal file
View File

@@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os.path
tools = [
("isomd5sum", "/usr/bin/implantisomd5"),
("isomd5sum", "/usr/bin/checkisomd5"),
("jigdo", "/usr/bin/jigdo-lite"),
("genisoimage", "/usr/bin/genisoimage"),
("gettext", "/usr/bin/msgfmt"),
("syslinux", "/usr/bin/isohybrid"),
("yum-utils", "/usr/bin/createrepo"),
("yum-utils", "/usr/bin/mergerepo"),
("yum-utils", "/usr/bin/repoquery"),
("git", "/usr/bin/git"),
("cvs", "/usr/bin/cvs"),
("gettext", "/usr/bin/msgfmt"),
]
imports = [
("kobo", "kobo"),
("kobo-rpmlib", "kobo.rpmlib"),
("python-lxml", "lxml"),
("koji", "koji"),
("productmd", "productmd"),
]
def check():
fail = False
# Check python modules
for package, module in imports:
try:
__import__(module)
except ImportError:
print("Module '%s' doesn't exist. Install package '%s'." % (module, package))
fail = True
# Check tools
for package, path in tools:
if not os.path.exists(path):
print("Program '%s' doesn't exist. Install package '%s'." % (path, package))
fail = True
return not fail
def validate_options(conf, valid_options):
errors = []
for i in valid_options:
name = i["name"]
value = conf.get(name)
if i.get("deprecated", False):
if name in conf:
errors.append("Deprecated config option: %s; %s" % (name, i["comment"]))
continue
if name not in conf:
if not i.get("optional", False):
errors.append("Config option not set: %s" % name)
continue
# verify type
if "expected_types" in i:
etypes = i["expected_types"]
if not isinstance(etypes, list) and not isinstance(etypes, tuple):
raise TypeError("The 'expected_types' value must be wrapped in a list: %s" % i)
found = False
for etype in etypes:
if isinstance(value, etype):
found = True
break
if not found:
errors.append("Config option '%s' has invalid type: %s. Expected: %s." % (name, str(type(value)), etypes))
continue
# verify value
if "expected_values" in i:
evalues = i["expected_values"]
if not isinstance(evalues, list) and not isinstance(evalues, tuple):
raise TypeError("The 'expected_values' value must be wrapped in a list: %s" % i)
found = False
for evalue in evalues:
if value == evalue:
found = True
break
if not found:
errors.append("Config option '%s' has invalid value: %s. Expected: %s." % (name, value, evalues))
continue
if "requires" in i:
for func, requires in i["requires"]:
if func(value):
for req in requires:
if req not in conf:
errors.append("Config option %s=%s requires %s which is not set" % (name, value, req))
if "conflicts" in i:
for func, conflicts in i["conflicts"]:
if func(value):
for con in conflicts:
if con in conf:
errors.append("Config option %s=%s conflicts with option %s" % (name, value, con))
return errors
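A minimal sketch of what a valid_options list for validate_options() could look like; the option names are hypothetical:
valid_options = [
    {"name": "product_name", "expected_types": [str]},
    {"name": "media_size", "optional": True, "expected_types": [int, str]},
    {"name": "old_opt", "deprecated": True, "comment": "use new_opt instead"},
]
conf = {"media_size": "4700M", "old_opt": 1}
for error in validate_options(conf, valid_options):
    print(error)
# Config option not set: product_name
# Deprecated config option: old_opt; use new_opt instead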

241
pungi/compose.py Normal file
View File

@@ -0,0 +1,241 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__all__ = (
"Compose",
)
import errno
import os
import time
import tempfile
import shutil
import kobo.log
from productmd import ComposeInfo, ImageManifest
from pungi.wrappers.variants import VariantsXmlParser
from pungi.paths import Paths
from pungi.wrappers.scm import get_file_from_scm
from pungi.util import makedirs
from pungi.metadata import compose_to_composeinfo
def get_compose_dir(topdir, conf, compose_type="production", compose_date=None, compose_respin=None, compose_label=None, already_exists_callbacks=None):
topdir = os.path.abspath(topdir)
already_exists_callbacks = already_exists_callbacks or []
# create an incomplete ComposeInfo to generate compose ID
ci = ComposeInfo()
ci.product.name = conf["product_name"]
ci.product.short = conf["product_short"]
ci.product.version = conf["product_version"]
ci.product.is_layered = bool(conf.get("product_is_layered", False))
if ci.product.is_layered:
ci.base_product.name = conf["base_product_name"]
ci.base_product.short = conf["base_product_short"]
ci.base_product.version = conf["base_product_version"]
ci.compose.label = compose_label
ci.compose.type = compose_type
ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
ci.compose.respin = compose_respin or 0
# HACK - add topdir for callbacks
ci.topdir = topdir
while True:
ci.compose.id = ci.create_compose_id()
compose_dir = os.path.join(topdir, ci.compose.id)
exists = False
# TODO: callbacks to determine if a composeid was already used
# for callback in already_exists_callbacks:
# if callback(data):
# exists = True
# break
# already_exists_callbacks fallback: does target compose_dir exist?
if not exists:
try:
os.makedirs(compose_dir)
except OSError as ex:
if ex.errno == errno.EEXIST:
exists = True
else:
raise
if exists:
ci.compose.respin += 1
continue
break
open(os.path.join(compose_dir, "COMPOSE_ID"), "w").write(ci.compose.id)
work_dir = os.path.join(compose_dir, "work", "global")
makedirs(work_dir)
ci.dump(os.path.join(work_dir, "composeinfo-base.json"))
return compose_dir
class Compose(kobo.log.LoggingBase):
def __init__(self, conf, topdir, debug=False, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None):
kobo.log.LoggingBase.__init__(self, logger)
# TODO: check if minimal conf values are set
self.conf = conf
self.variants = {}
self.topdir = os.path.abspath(topdir)
self.skip_phases = skip_phases or []
self.just_phases = just_phases or []
self.old_composes = old_composes or []
self.koji_event = koji_event
# intentionally upper-case (visible in the code)
self.DEBUG = debug
# path definitions
self.paths = Paths(self)
# to provide compose_id, compose_date and compose_respin
self.ci_base = ComposeInfo()
self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))
self.supported = supported
if self.compose_label and self.compose_label.split("-")[0] == "RC":
self.log_info("Automatically setting 'supported' flag for a Release Candidate (%s) compose." % self.compose_label)
self.supported = True
self.im = ImageManifest()
if self.DEBUG:
try:
self.im.load(self.paths.compose.metadata("images.json"))
except RuntimeError:
pass
self.im.compose.id = self.compose_id
self.im.compose.type = self.compose_type
self.im.compose.date = self.compose_date
self.im.compose.respin = self.compose_respin
self.im.metadata_path = self.paths.compose.metadata()
get_compose_dir = staticmethod(get_compose_dir)
def __getitem__(self, name):
return self.variants[name]
@property
def compose_id(self):
return self.ci_base.compose.id
@property
def compose_date(self):
return self.ci_base.compose.date
@property
def compose_respin(self):
return self.ci_base.compose.respin
@property
def compose_type(self):
return self.ci_base.compose.type
@property
def compose_type_suffix(self):
return self.ci_base.compose.type_suffix
@property
def compose_label(self):
return self.ci_base.compose.label
@property
def has_comps(self):
return bool(self.conf.get("comps_file", False))
@property
def config_dir(self):
return os.path.dirname(self.conf._open_file or "")
def read_variants(self):
# TODO: move to phases/init ?
variants_file = self.paths.work.variants_file(arch="global")
msg = "Writing variants file: %s" % variants_file
if self.DEBUG and os.path.isfile(variants_file):
self.log_warning("[SKIP ] %s" % msg)
else:
scm_dict = self.conf["variants_file"]
if isinstance(scm_dict, dict):
file_name = os.path.basename(scm_dict["file"])
if scm_dict["scm"] == "file":
scm_dict["file"] = os.path.join(self.config_dir, os.path.basename(scm_dict["file"]))
else:
file_name = os.path.basename(scm_dict)
scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))
self.log_debug(msg)
tmp_dir = tempfile.mkdtemp(prefix="variants_file_")
get_file_from_scm(scm_dict, tmp_dir, logger=self._logger)
shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)
shutil.rmtree(tmp_dir)
file_obj = open(variants_file, "r")
tree_arches = self.conf.get("tree_arches", None)
self.variants = VariantsXmlParser(file_obj, tree_arches).parse()
# populate ci_base with variants - needed for layered-products (compose_id)
self.ci_base = compose_to_composeinfo(self)
def get_variants(self, types=None, arch=None, recursive=False):
result = []
types = types or ["variant", "optional", "addon", "layered-product"]
for i in self.variants.values():
if i.type in types:
if arch and arch not in i.arches:
continue
result.append(i)
result.extend(i.get_variants(types=types, arch=arch, recursive=recursive))
return sorted(set(result))
def get_arches(self):
result = set()
tree_arches = self.conf.get("tree_arches", None)
for variant in self.get_variants():
for arch in variant.arches:
if tree_arches:
if arch in tree_arches:
result.add(arch)
else:
result.add(arch)
return sorted(result)
def write_status(self, stat_msg):
if stat_msg not in ("STARTED", "FINISHED", "DOOMED"):
self.log_warning("Writing nonstandard compose status: %s" % stat_msg)
old_status = self.get_status()
if stat_msg == old_status:
return
if old_status == "FINISHED":
msg = "Could not modify a FINISHED compose: %s" % self.topdir
self.log_error(msg)
raise RuntimeError(msg)
open(os.path.join(self.topdir, "STATUS"), "w").write(stat_msg + "\n")
def get_status(self):
path = os.path.join(self.topdir, "STATUS")
if not os.path.isfile(path):
return
return open(path, "r").read().strip()
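A minimal sketch of allocating a compose directory via get_compose_dir(), assuming only the conf keys read above; all values and paths are hypothetical:
conf = {
    "product_name": "Fedora",
    "product_short": "Fedora",
    "product_version": "21",
    "product_is_layered": False,
}
# allocates e.g. /mnt/composes/Fedora-21-20150210.t.0 (".t" marks a test compose)
# and writes COMPOSE_ID plus work/global/composeinfo-base.json into it
compose_dir = Compose.get_compose_dir("/mnt/composes", conf, compose_type="test")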

View File

View File

@@ -0,0 +1,97 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
The .discinfo file contains metadata about media.
The following fields are part of the .discinfo file,
one record per line:
- timestamp
- release
- architecture
- disc number (optional)
"""
__all__ = (
"read_discinfo",
"write_discinfo",
"write_media_repo",
)
import time
def write_discinfo(file_path, description, arch, disc_numbers=None, timestamp=None):
"""
Write a .discinfo file.
"""
disc_numbers = disc_numbers or ["ALL"]
if not isinstance(disc_numbers, list):
raise TypeError("Invalid type: disc_numbers type is %s; expected: <list>" % type(disc_numbers))
if not timestamp:
timestamp = "%f" % time.time()
f = open(file_path, "w")
f.write("%s\n" % timestamp)
f.write("%s\n" % description)
f.write("%s\n" % arch)
if disc_numbers:
f.write("%s\n" % ",".join([str(i) for i in disc_numbers]))
f.close()
return timestamp
def read_discinfo(file_path):
result = {}
f = open(file_path, "r")
result["timestamp"] = f.readline().strip()
result["description"] = f.readline().strip()
result["arch"] = f.readline().strip()
disc_numbers = f.readline().strip()
if not disc_numbers:
result["disc_numbers"] = None
elif disc_numbers == "ALL":
result["disc_numbers"] = ["ALL"]
else:
result["disc_numbers"] = [int(i) for i in disc_numbers.split(",")]
return result
def write_media_repo(file_path, description, timestamp=None):
"""
Write a media.repo file for the disc, to be used on the installed system.
PackageKit uses this.
"""
if not timestamp:
timestamp = "%f" % time.time()
data = [
"[InstallMedia]",
"name=%s" % description,
"mediaid=%s" % timestamp,
"metadata_expire=-1",
"gpgcheck=0",
"cost=500",
"",
]
repo_file = open(file_path, "w")
repo_file.write("\n".join(data))
repo_file.close()
return timestamp
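A minimal round-trip sketch of the functions above; the paths are hypothetical:
ts = write_discinfo("/tmp/.discinfo", "Fedora 21", "x86_64")
info = read_discinfo("/tmp/.discinfo")
assert info["description"] == "Fedora 21"
assert info["disc_numbers"] == ["ALL"]  # the default when no numbers are given
write_media_repo("/tmp/media.repo", "Fedora 21", timestamp=ts)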

View File

@@ -1,4 +1,6 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
@@ -12,12 +14,14 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import time
import yum
from ConfigParser import SafeConfigParser
class Config(SafeConfigParser):
def __init__(self):
SafeConfigParser.__init__(self)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify

View File

@@ -1,6 +1,20 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Pungi adds several new sections to kickstarts.

315
pungi/linker.py Normal file
View File

@@ -0,0 +1,315 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import errno
import os
import shutil
import kobo.log
from kobo.shortcuts import relative_path
from kobo.threads import WorkerThread, ThreadPool
from pungi.util import makedirs
class LinkerPool(ThreadPool):
def __init__(self, link_type="hardlink-or-copy", logger=None):
ThreadPool.__init__(self, logger)
self.link_type = link_type
self.linker = Linker()
class LinkerThread(WorkerThread):
def process(self, item, num):
src, dst = item
if (num % 100 == 0) or (num == self.pool.queue_total):
self.pool.log_debug("Linked %s out of %s packages" % (num, self.pool.queue_total))
self.pool.linker.link(src, dst, link_type=self.pool.link_type)
class Linker(kobo.log.LoggingBase):
def __init__(self, ignore_existing=False, always_copy=None, test=False, logger=None):
kobo.log.LoggingBase.__init__(self, logger=logger)
self.ignore_existing = ignore_existing
self.always_copy = always_copy or []
self.test = test
self._precache = {}
self._inode_map = {}
def _is_same_type(self, path1, path2):
if not os.path.islink(path1) == os.path.islink(path2):
return False
if not os.path.isdir(path1) == os.path.isdir(path2):
return False
if not os.path.isfile(path1) == os.path.isfile(path2):
return False
return True
def _is_same(self, path1, path2):
if self.ignore_existing:
return True
if path1 == path2:
return True
if os.path.islink(path2) and not os.path.exists(path2):
return True
if os.path.getsize(path1) != os.path.getsize(path2):
return False
if int(os.path.getmtime(path1)) != int(os.path.getmtime(path2)):
return False
return True
def symlink(self, src, dst, relative=True):
if src == dst:
return
old_src = src
if relative:
src = relative_path(src, dst)
msg = "Symlinking %s -> %s" % (dst, src)
if self.test:
self.log_info("TEST: %s" % msg)
return
self.log_info(msg)
try:
os.symlink(src, dst)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
if os.path.islink(dst) and self._is_same(old_src, dst):
if os.readlink(dst) != src:
raise
self.log_debug("The same file already exists, skipping symlink %s -> %s" % (dst, src))
else:
raise
def hardlink_on_dest(self, src, dst):
if src == dst:
return
if os.path.exists(src):
st = os.stat(src)
file_name = os.path.basename(src)
precache_key = (file_name, int(st.st_mtime), st.st_size)
if precache_key in self._precache:
self.log_warning("HIT %s" % str(precache_key))
cached_path = self._precache[precache_key]["path"]
self.hardlink(cached_path, dst)
return True
return False
def hardlink(self, src, dst):
if src == dst:
return
msg = "Hardlinking %s to %s" % (src, dst)
if self.test:
self.log_info("TEST: %s" % msg)
return
self.log_info(msg)
try:
os.link(src, dst)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
if self._is_same(src, dst):
if not self._is_same_type(src, dst):
self.log_error("File %s already exists but has different type than %s" % (dst, src))
raise
self.log_debug("The same file already exists, skipping hardlink %s to %s" % (src, dst))
else:
raise
def copy(self, src, dst):
if src == dst:
return True
if os.path.islink(src):
msg = "Copying symlink %s to %s" % (src, dst)
else:
msg = "Copying file %s to %s" % (src, dst)
if self.test:
self.log_info("TEST: %s" % msg)
return
self.log_info(msg)
if os.path.exists(dst):
if self._is_same(src, dst):
if not self._is_same_type(src, dst):
self.log_error("File %s already exists but has different type than %s" % (dst, src))
raise OSError(errno.EEXIST, "File exists")
self.log_debug("The same file already exists, skipping copy %s to %s" % (src, dst))
return
else:
raise OSError(errno.EEXIST, "File exists")
if os.path.islink(src):
if not os.path.islink(dst):
os.symlink(os.readlink(src), dst)
return
return
src_stat = os.stat(src)
src_key = (src_stat.st_dev, src_stat.st_ino)
if src_key in self._inode_map:
# (st_dev, st_ino) found in the mapping
self.log_debug("Harlink detected, hardlinking in destination %s to %s" % (self._inode_map[src_key], dst))
os.link(self._inode_map[src_key], dst)
return
# BEWARE: shutil.copy2 automatically *overwrites* existing files
shutil.copy2(src, dst)
self._inode_map[src_key] = dst
if not self._is_same(src, dst):
self.log_error("File %s doesn't match the copied file %s" % (src, dst))
# XXX:
raise OSError(errno.EEXIST, "File exists")
def _put_into_cache(self, path):
def get_stats(item):
return [item[i] for i in ("st_dev", "st_ino", "st_mtime", "st_size")]
filename = os.path.basename(path)
st = os.stat(path)
item = {
"st_dev": st.st_dev,
"st_ino": st.st_ino,
"st_mtime": int(st.st_mtime),
"st_size": st.st_size,
"path": path,
}
precache_key = (filename, int(st.st_mtime), st.st_size)
if precache_key in self._precache:
if get_stats(self._precache[precache_key]) != get_stats(item):
# Files have the same mtime and size but a different
# device and/or inode.
self.log_debug("Caching failed, files are different: %s, %s"
% (path, self._precache[precache_key]["path"]))
return False
self._precache[precache_key] = item
return True
def scan(self, path):
"""Recursively scan a directory and populate the cache."""
msg = "Scanning directory: %s" % path
self.log_debug("[BEGIN] %s" % msg)
for dirpath, _, filenames in os.walk(path):
for filename in filenames:
path = os.path.join(dirpath, filename)
self._put_into_cache(path)
self.log_debug("[DONE ] %s" % msg)
def _link_file(self, src, dst, link_type):
if link_type == "hardlink":
if not self.hardlink_on_dest(src, dst):
self.hardlink(src, dst)
elif link_type == "copy":
self.copy(src, dst)
elif link_type in ("symlink", "abspath-symlink"):
if os.path.islink(src):
self.copy(src, dst)
else:
relative = link_type != "abspath-symlink"
self.symlink(src, dst, relative)
elif link_type == "hardlink-or-copy":
if not self.hardlink_on_dest(src, dst):
src_stat = os.stat(src)
dst_stat = os.stat(os.path.dirname(dst))
if src_stat.st_dev == dst_stat.st_dev:
self.hardlink(src, dst)
else:
self.copy(src, dst)
else:
raise ValueError("Unknown link_type: %s" % link_type)
def link(self, src, dst, link_type="hardlink-or-copy", scan=True):
"""Link directories recursively."""
if os.path.isfile(src) or os.path.islink(src):
self._link_file(src, dst, link_type)
return
if os.path.isfile(dst):
raise OSError(errno.EEXIST, "File exists")
if not self.test:
if not os.path.exists(dst):
makedirs(dst)
shutil.copystat(src, dst)
for i in os.listdir(src):
src_path = os.path.join(src, i)
dst_path = os.path.join(dst, i)
self.link(src_path, dst_path, link_type)
return
if scan:
self.scan(dst)
self.log_debug("Start linking")
src = os.path.abspath(src)
for dirpath, dirnames, filenames in os.walk(src):
rel_path = dirpath[len(src):].lstrip("/")
dst_path = os.path.join(dst, rel_path)
# Dir check and creation
if not os.path.isdir(dst_path):
if os.path.exists(dst_path):
# At destination there is a file with same name but
# it is not a directory.
self.log_error("Cannot create directory %s" % dst_path)
dirnames[:] = []  # prune the walk; do not descend below this directory
continue
os.mkdir(dst_path)
# Process all files in directory
for filename in filenames:
path = os.path.join(dirpath, filename)
st = os.stat(path)
# Check cache
# The same file already exists in the destination dir =>
# create the new file as a hardlink to the cached one.
precache_key = (filename, int(st.st_mtime), st.st_size)
full_dst_path = os.path.join(dst_path, filename)
if precache_key in self._precache:
# Cache hit
cached_path = self._precache[precache_key]["path"]
self.log_debug("Cache HIT for %s [%s]" % (path, cached_path))
if cached_path != full_dst_path:
self.hardlink(cached_path, full_dst_path)
else:
self.log_debug("Files are same, skip hardlinking")
continue
# Cache miss
# Copy the new file and put it to the cache.
try:
self.copy(path, full_dst_path)
except Exception as ex:
print(ex)
print(path, open(path, "r").read())
print(full_dst_path, open(full_dst_path, "r").read())
print(os.stat(path))
print(os.stat(full_dst_path))
os.utime(full_dst_path, (st.st_atime, int(st.st_mtime)))
self._put_into_cache(full_dst_path)
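A minimal usage sketch, assuming kobo's ThreadPool API (add, queue_put, start, stop) and the package layout of this commit; the paths are hypothetical:
import logging
from pungi.linker import Linker, LinkerPool, LinkerThread

logger = logging.getLogger("Pungi")

# one-off: hardlink where possible, fall back to copying across filesystems
Linker(logger=logger).link("/mnt/koji/packages", "/compose/Packages", link_type="hardlink-or-copy")

# bulk: queue (src, dst) pairs and let worker threads process them
pool = LinkerPool(link_type="hardlink-or-copy", logger=logger)
for _ in range(10):
    pool.add(LinkerThread(pool))
pool.queue_put(("/mnt/src/a.rpm", "/compose/Packages/a.rpm"))
pool.start()
pool.stop()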

133
pungi/media_split.py Normal file
View File

@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
SIZE_UNITS = {
"b": 1,
"k": 1024,
"M": 1024 ** 2,
"G": 1024 ** 3,
}
def convert_media_size(size):
if isinstance(size, str):
if size[-1] in SIZE_UNITS:
num = int(size[:-1])
units = size[-1]
else:
num = int(size)
units = "b"
result = num * SIZE_UNITS[units]
else:
result = int(size)
if result <= 0:
raise ValueError("Media size must be a positive number: %s" % size)
return result
def convert_file_size(size, block_size=2048):
"""round file size to block"""
blocks = int(size / block_size)
if size % block_size:
blocks += 1
return blocks * block_size
class MediaSplitter(object):
def __init__(self, media_size):
self.media_size = convert_media_size(media_size)
self.files = [] # to preserve order
self.file_sizes = {}
self.sticky_files = set()
def add_file(self, name, size, sticky=False):
name = os.path.normpath(name)
size = int(size)
old_size = self.file_sizes.get(name, None)
if old_size is not None and old_size != size:
raise ValueError("File size mismatch; file: %s; sizes: %s vs %s" % (name, old_size, size))
if size > self.media_size:
raise ValueError("File is larger than media size: %s" % name)
if old_size is None:
self.files.append(name)
self.file_sizes[name] = size
if sticky:
self.sticky_files.add(name)
'''
def load(self, file_name):
f = open(file_name, "r")
for line in f:
line = line.strip()
if not line:
continue
name, size = line.split(" ")
self.add_file(name, size)
f.close()
def scan(self, pattern):
for i in glob.glob(pattern):
self.add_file(i, os.path.getsize(i))
def dump(self, file_name):
f = open(file_name, "w")
for name in self.files:
f.write("%s %s\n" % (os.path.basename(name), self.file_sizes[name]))
f.close()
'''
@property
def total_size(self):
return sum(self.file_sizes.values())
@property
def total_size_in_blocks(self):
return sum([convert_file_size(i) for i in list(self.file_sizes.values())])
def split(self, first_disk=0, all_disks=0):
all_files = []
sticky_files = []
sticky_files_size = 0
for name in self.files:
if name in self.sticky_files:
sticky_files.append(name)
sticky_files_size += convert_file_size(self.file_sizes[name])
else:
all_files.append(name)
disks = []
disk = {}
while all_files:
name = all_files.pop(0)
size = convert_file_size(self.file_sizes[name])
if not disks or disk["size"] + size > self.media_size:
disk = {"size": 0, "files": []}
disks.append(disk)
disk["files"].extend(sticky_files)
disk["size"] += sticky_files_size
disk["files"].append(name)
disk["size"] += convert_file_size(self.file_sizes[name])
return disks
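A minimal sketch of the splitter above; file names and sizes are hypothetical:
ms = MediaSplitter("4700M")              # convert_media_size -> 4700 * 1024 ** 2
ms.add_file("GPL", 18092, sticky=True)   # sticky files are repeated on every disc
ms.add_file("Packages/a.rpm", 2 * 1024 ** 3)
ms.add_file("Packages/b.rpm", 3 * 1024 ** 3)
for num, disc in enumerate(ms.split(), 1):
    print(num, disc["size"], disc["files"])
# a.rpm and b.rpm together exceed one 4700M medium, so this prints two discs,
# each of them carrying the sticky GPL file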

306
pungi/metadata.py Normal file
View File

@@ -0,0 +1,306 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import time
import productmd.composeinfo
import productmd.treeinfo
import productmd.treeinfo.product
from productmd import get_major_version
from kobo.shortcuts import relative_path
from pungi.compose_metadata.discinfo import write_discinfo as create_discinfo
from pungi.compose_metadata.discinfo import write_media_repo as create_media_repo
def get_description(compose, variant, arch):
if "product_discinfo_description" in compose.conf:
result = compose.conf["product_discinfo_description"]
elif variant.type == "layered-product":
# we need to make sure the layered product behaves as it was composed separately
result = "%s %s for %s %s" % (variant.product_name, variant.product_version, compose.conf["product_name"], get_major_version(compose.conf["product_version"]))
else:
result = "%s %s" % (compose.conf["product_name"], compose.conf["product_version"])
if compose.conf.get("is_layered", False):
result += "for %s %s" % (compose.conf["base_product_name"], compose.conf["base_product_version"])
result = result % {"variant_name": variant.name, "arch": arch}
return result
def write_discinfo(compose, arch, variant):
if variant.type == "addon":
return
os_tree = compose.paths.compose.os_tree(arch, variant)
path = os.path.join(os_tree, ".discinfo")
# description = get_volid(compose, arch, variant)
description = get_description(compose, variant, arch)
return create_discinfo(path, description, arch)
def write_media_repo(compose, arch, variant, timestamp=None):
if variant.type == "addon":
return
os_tree = compose.paths.compose.os_tree(arch, variant)
path = os.path.join(os_tree, "media.repo")
# description = get_volid(compose, arch, variant)
description = get_description(compose, variant, arch)
return create_media_repo(path, description, timestamp)
def compose_to_composeinfo(compose):
ci = productmd.composeinfo.ComposeInfo()
# compose
ci.compose.id = compose.compose_id
ci.compose.type = compose.compose_type
ci.compose.date = compose.compose_date
ci.compose.respin = compose.compose_respin
ci.compose.label = compose.compose_label
# product
ci.product.name = compose.conf["product_name"]
ci.product.version = compose.conf["product_version"]
ci.product.short = compose.conf["product_short"]
ci.product.is_layered = compose.conf.get("product_is_layered", False)
# base product
if ci.product.is_layered:
ci.base_product.name = compose.conf["base_product_name"]
ci.base_product.version = compose.conf["base_product_version"]
ci.base_product.short = compose.conf["base_product_short"]
def dump_variant(variant, parent=None):
var = productmd.composeinfo.Variant(ci)
tree_arches = compose.conf.get("tree_arches", None)
if tree_arches and not (set(variant.arches) & set(tree_arches)):
return None
# variant details
var.id = variant.id
var.uid = variant.uid
var.name = variant.name
var.type = variant.type
var.arches = set(variant.arches)
if var.type == "layered-product":
var.product.name = variant.product_name
var.product.short = variant.product_short
var.product.version = variant.product_version
var.product.is_layered = True
for arch in variant.arches:
# paths: binaries
var.os_tree[arch] = relative_path(compose.paths.compose.os_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
var.repository[arch] = relative_path(compose.paths.compose.repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
var.packages[arch] = relative_path(compose.paths.compose.packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
iso_dir = compose.paths.compose.iso_dir(arch=arch, variant=variant, create_dir=False) or ""
if iso_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), iso_dir)):
var.isos[arch] = relative_path(iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
jigdo_dir = compose.paths.compose.jigdo_dir(arch=arch, variant=variant, create_dir=False) or ""
if jigdo_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), jigdo_dir)):
var.jigdos[arch] = relative_path(jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
# paths: sources
var.source_tree[arch] = relative_path(compose.paths.compose.os_tree(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
var.source_repository[arch] = relative_path(compose.paths.compose.repository(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
var.source_packages[arch] = relative_path(compose.paths.compose.packages(arch="source", variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
source_iso_dir = compose.paths.compose.iso_dir(arch="source", variant=variant, create_dir=False) or ""
if source_iso_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), source_iso_dir)):
var.source_isos[arch] = relative_path(source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
source_jigdo_dir = compose.paths.compose.jigdo_dir(arch="source", variant=variant, create_dir=False) or ""
if source_jigdo_dir and os.path.isdir(os.path.join(compose.paths.compose.topdir(), source_jigdo_dir)):
var.source_jigdos[arch] = relative_path(source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
# paths: debug
var.debug_tree[arch] = relative_path(compose.paths.compose.debug_tree(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
var.debug_repository[arch] = relative_path(compose.paths.compose.debug_repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
var.debug_packages[arch] = relative_path(compose.paths.compose.debug_packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
'''
# XXX: not supported (yet?)
debug_iso_dir = compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
if debug_iso_dir:
var.debug_iso_dir[arch] = relative_path(debug_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
debug_jigdo_dir = compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
if debug_jigdo_dir:
var.debug_jigdo_dir[arch] = relative_path(debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
'''
for v in variant.get_variants(recursive=False):
x = dump_variant(v, parent=variant)
if x is not None:
var.add(x)
return var
for variant_id in sorted(compose.variants):
variant = compose.variants[variant_id]
v = dump_variant(variant)
if v is not None:
ci.variants.add(v)
return ci
def write_compose_info(compose):
ci = compose_to_composeinfo(compose)
msg = "Writing composeinfo"
compose.log_info("[BEGIN] %s" % msg)
path = compose.paths.compose.metadata("composeinfo.json")
ci.dump(path)
compose.log_info("[DONE ] %s" % msg)
def write_tree_info(compose, arch, variant, timestamp=None):
if variant.type in ("addon", ):
return
if not timestamp:
timestamp = int(time.time())
else:
timestamp = int(timestamp)
os_tree = compose.paths.compose.os_tree(arch=arch, variant=variant).rstrip("/") + "/"
ti = productmd.treeinfo.TreeInfo()
# load from buildinstall .treeinfo
if variant.type == "layered-product":
# we need to make sure the layered product behaves as it was composed separately
# product
# TODO: read from variants.xml
ti.product.name = variant.product_name
ti.product.version = variant.product_version
ti.product.short = variant.product_short
ti.product.is_layered = True
# base product
ti.base_product.name = compose.conf["product_name"]
if "." in compose.conf["product_version"]:
# remove minor version if present
ti.base_product.version = get_major_version(compose.conf["product_version"])
else:
ti.base_product.version = compose.conf["product_version"]
ti.base_product.short = compose.conf["product_short"]
else:
# product
ti.product.name = compose.conf["product_name"]
ti.product.version = compose.conf["product_version"]
ti.product.short = compose.conf["product_short"]
ti.product.is_layered = compose.conf.get("product_is_layered", False)
# base product
if ti.product.is_layered:
ti.base_product.name = compose.conf["base_product_name"]
ti.base_product.version = compose.conf["base_product_version"]
ti.base_product.short = compose.conf["base_product_short"]
# tree
ti.tree.arch = arch
ti.tree.build_timestamp = timestamp
# ti.platforms
# main variant
var = productmd.treeinfo.Variant(ti)
if variant.type == "layered-product":
var.id = variant.parent.id
var.uid = variant.parent.uid
var.name = variant.parent.name
var.type = "variant"
else:
var.id = variant.id
var.uid = variant.uid
var.name = variant.name
var.type = variant.type
var.packages = relative_path(compose.paths.compose.packages(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", os_tree).rstrip("/") or "."
var.repository = relative_path(compose.paths.compose.repository(arch=arch, variant=variant, create_dir=False).rstrip("/") + "/", os_tree).rstrip("/") or "."
ti.variants.add(var)
repomd_path = os.path.join(var.repository, "repodata", "repomd.xml")
ti.checksums.add(os_tree, repomd_path)
for i in variant.get_variants(types=["addon"], arch=arch):
addon = productmd.treeinfo.Variant(ti)
addon.id = i.id
addon.uid = i.uid
addon.name = i.name
addon.type = i.type
os_tree = compose.paths.compose.os_tree(arch=arch, variant=i).rstrip("/") + "/"
addon.packages = relative_path(compose.paths.compose.packages(arch=arch, variant=i, create_dir=False).rstrip("/") + "/", os_tree).rstrip("/") or "."
addon.repository = relative_path(compose.paths.compose.repository(arch=arch, variant=i, create_dir=False).rstrip("/") + "/", os_tree).rstrip("/") or "."
var.add(addon)
repomd_path = os.path.join(addon.repository, "repodata", "repomd.xml")
ti.checksums.add(os_tree, repomd_path)
class LoraxProduct(productmd.treeinfo.product.Product):
def _check_short(self):
# HACK: set self.short so .treeinfo produced by lorax can be read
if not self.short:
self.short = compose.conf["product_short"]
class LoraxTreeInfo(productmd.treeinfo.TreeInfo):
def clear(self):
productmd.treeinfo.TreeInfo.clear(self)
self.product = LoraxProduct(self)
# images
if variant.type == "variant":
os_tree = compose.paths.compose.os_tree(arch, variant)
# clone all but 'general' sections from buildinstall .treeinfo
bi_treeinfo = os.path.join(compose.paths.work.buildinstall_dir(arch), ".treeinfo")
if os.path.exists(bi_treeinfo):
bi_ti = LoraxTreeInfo()
bi_ti.load(bi_treeinfo)
# stage2 - mainimage
if bi_ti.stage2.mainimage:
ti.stage2.mainimage = bi_ti.stage2.mainimage
ti.checksums.add(os_tree, ti.stage2.mainimage)
# stage2 - instimage
if bi_ti.stage2.instimage:
ti.stage2.instimage = bi_ti.stage2.instimage
ti.checksums.add(os_tree, ti.stage2.instimage)
# images
for platform in bi_ti.images.images:
ti.images.images[platform] = {}
ti.tree.platforms.add(platform)
for image, path in bi_ti.images.images[platform].items():
ti.images.images[platform][image] = path
ti.checksums.add(os_tree, path)
# add product.img to images-$arch
product_img = os.path.join(os_tree, "images", "product.img")
product_img_relpath = relative_path(product_img, os_tree.rstrip("/") + "/")
if os.path.isfile(product_img):
for platform in ti.images.images:
ti.images.images[platform]["product.img"] = product_img_relpath
ti.checksums.add(os_tree, product_img_relpath)
path = os.path.join(compose.paths.compose.os_tree(arch=arch, variant=variant), ".treeinfo")
compose.log_info("Writing treeinfo: %s" % path)
ti.dump(path)
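A minimal sketch of reading the result back with the same productmd API used above; the path is hypothetical:
import productmd.treeinfo

ti = productmd.treeinfo.TreeInfo()
ti.load("/compose/Server/x86_64/os/.treeinfo")
print(ti.product.name, ti.tree.arch, ti.tree.build_timestamp)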

View File

@@ -1,6 +1,20 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import fnmatch

526
pungi/paths.py Normal file
View File

@@ -0,0 +1,526 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__all__ = (
"Paths",
)
import errno
import os
from pungi.util import makedirs
class Paths(object):
def __init__(self, compose):
paths_module_name = compose.conf.get("paths_module", None)
if paths_module_name:
# custom paths
compose.log_info("Using custom paths from module %s" % paths_module_name)
paths_module = __import__(paths_module_name, globals(), locals(), ["LogPaths", "WorkPaths", "ComposePaths"])
self.compose = paths_module.ComposePaths(compose)
self.log = paths_module.LogPaths(compose)
self.work = paths_module.WorkPaths(compose)
else:
# default paths
self.compose = ComposePaths(compose)
self.log = LogPaths(compose)
self.work = WorkPaths(compose)
# self.metadata ?
class LogPaths(object):
def __init__(self, compose):
self.compose = compose
def topdir(self, arch=None, create_dir=True):
"""
Examples:
log/global
log/x86_64
"""
arch = arch or "global"
path = os.path.join(self.compose.topdir, "logs", arch)
if create_dir:
makedirs(path)
return path
def log_file(self, arch, log_name, create_dir=True):
arch = arch or "global"
if log_name.endswith(".log"):
log_name = log_name[:-4]
return os.path.join(self.topdir(arch, create_dir=create_dir), "%s.%s.log" % (log_name, arch))
class WorkPaths(object):
def __init__(self, compose):
self.compose = compose
def topdir(self, arch=None, create_dir=True):
"""
Examples:
work/global
work/x86_64
"""
arch = arch or "global"
path = os.path.join(self.compose.topdir, "work", arch)
if create_dir:
makedirs(path)
return path
def variants_file(self, arch=None, create_dir=True):
"""
Examples:
work/global/variants.xml
"""
arch = "global"
path = os.path.join(self.topdir(arch, create_dir=create_dir), "variants.xml")
return path
def comps(self, arch=None, variant=None, create_dir=True):
"""
Examples:
work/x86_64/comps/comps-x86_64.xml
work/x86_64/comps/comps-Server.x86_64.xml
"""
arch = arch or "global"
if variant is None:
file_name = "comps-%s.xml" % arch
else:
file_name = "comps-%s.%s.xml" % (variant.uid, arch)
path = os.path.join(self.topdir(arch, create_dir=create_dir), "comps")
if create_dir:
makedirs(path)
path = os.path.join(path, file_name)
return path
def pungi_conf(self, arch=None, variant=None, create_dir=True):
"""
Examples:
work/x86_64/pungi/x86_64.conf
work/x86_64/pungi/Server.x86_64.conf
"""
arch = arch or "global"
if variant is None:
file_name = "%s.conf" % arch
else:
file_name = "%s.%s.conf" % (variant.uid, arch)
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi")
if create_dir:
makedirs(path)
path = os.path.join(path, file_name)
return path
def pungi_log(self, arch=None, variant=None, create_dir=True):
"""
Examples:
work/x86_64/pungi/x86_64.log
work/x86_64/pungi/Server.x86_64.log
"""
path = self.pungi_conf(arch, variant, create_dir=create_dir)
path = path[:-5] + ".log"
return path
def pungi_cache_dir(self, arch, variant=None, create_dir=True):
"""
Examples:
work/global/pungi-cache
"""
# WARNING: Using the same cache dir for repos with the same names may lead to a race condition.
# We should use per-arch-variant cache dirs to work around this.
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi-cache")
if variant:
path = os.path.join(path, variant.uid)
if create_dir:
makedirs(path)
return path
def comps_repo(self, arch=None, create_dir=True):
"""
Examples:
work/x86_64/comps-repo
work/global/comps-repo
"""
arch = arch or "global"
path = os.path.join(self.topdir(arch, create_dir=create_dir), "comps_repo")
if create_dir:
makedirs(path)
return path
def arch_repo(self, arch=None, create_dir=True):
"""
Examples:
work/x86_64/repo
work/global/repo
"""
arch = arch or "global"
path = os.path.join(self.topdir(arch, create_dir=create_dir), "repo")
if create_dir:
makedirs(path)
return path
def package_list(self, arch=None, variant=None, pkg_type=None, create_dir=True):
"""
Examples:
work/x86_64/package_list/x86_64.conf
work/x86_64/package_list/Server.x86_64.conf
work/x86_64/package_list/Server.x86_64.rpm.conf
"""
arch = arch or "global"
if variant is not None:
file_name = "%s.%s" % (variant, arch)
else:
file_name = "%s" % arch
if pkg_type is not None:
file_name += ".%s" % pkg_type
file_name += ".conf"
path = os.path.join(self.topdir(arch, create_dir=create_dir), "package_list")
if create_dir:
makedirs(path)
path = os.path.join(path, file_name)
return path
def pungi_download_dir(self, arch, create_dir=True):
"""
Examples:
work/x86_64/pungi_download
"""
path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi_download")
if create_dir:
makedirs(path)
return path
def buildinstall_dir(self, arch, create_dir=True):
"""
Examples:
work/x86_64/buildinstall
"""
if arch == "global":
raise RuntimeError("Global buildinstall dir makes no sense.")
path = os.path.join(self.topdir(arch, create_dir=create_dir), "buildinstall")
return path
def extra_files_dir(self, arch, variant, create_dir=True):
"""
Examples:
work/x86_64/Server/extra-files
"""
if arch == "global":
raise RuntimeError("Global extra files dir makes no sense.")
path = os.path.join(self.topdir(arch, create_dir=create_dir), variant.uid, "extra-files")
if create_dir:
makedirs(path)
return path
def repo_package_list(self, arch, variant, pkg_type=None, create_dir=True):
"""
Examples:
work/x86_64/repo_package_list/Server.x86_64.rpm.conf
"""
file_name = "%s.%s" % (variant, arch)
if pkg_type is not None:
file_name += ".%s" % pkg_type
file_name += ".conf"
path = os.path.join(self.topdir(arch, create_dir=create_dir), "repo_package_list")
if create_dir:
makedirs(path)
path = os.path.join(path, file_name)
return path
def product_img(self, variant, create_dir=True):
"""
Examples:
work/global/product-Server.img
"""
file_name = "product-%s.img" % variant
path = self.topdir(arch="global", create_dir=create_dir)
path = os.path.join(path, file_name)
return path
def iso_dir(self, arch, variant, disc_type="dvd", disc_num=1, create_dir=True):
"""
Examples:
work/x86_64/iso/rhel-7.0-20120127.0-Server-x86_64-dvd1.iso
"""
dir_name = self.compose.paths.compose.iso_path(arch, variant, disc_type, disc_num, create_dir=False)
dir_name = os.path.basename(dir_name)
path = os.path.join(self.topdir(arch, create_dir=create_dir), "iso", dir_name)
if create_dir:
makedirs(path)
return path
def tmp_dir(self, arch, variant=None, create_dir=True):
"""
Examples:
work/x86_64/tmp
work/x86_64/tmp-Server
"""
dir_name = "tmp"
if variant:
dir_name += "-%s" % variant.uid
path = os.path.join(self.topdir(arch, create_dir=create_dir), dir_name)
if create_dir:
makedirs(path)
return path
def product_id(self, arch, variant, create_dir=True):
"""
Examples:
work/x86_64/product_id/Server.x86_64.pem/productid
"""
# file_name = "%s.%s.pem" % (variant, arch)
# HACK: modifyrepo doesn't handle renames -> $dir/productid
file_name = "productid"
path = os.path.join(self.topdir(arch, create_dir=create_dir), "product_id", "%s.%s.pem" % (variant, arch))
if create_dir:
makedirs(path)
path = os.path.join(path, file_name)
return path
class ComposePaths(object):
def __init__(self, compose):
self.compose = compose
# TODO: TREES?
def topdir(self, arch=None, variant=None, create_dir=True, relative=False):
"""
Examples:
compose
compose/Server/x86_64
"""
if bool(arch) != bool(variant):
raise TypeError("topdir(): either none or 2 arguments are expected")
path = ""
if not relative:
path = os.path.join(self.compose.topdir, "compose")
if arch or variant:
if variant.type == "addon":
return self.topdir(arch, variant.parent, create_dir=create_dir, relative=relative)
path = os.path.join(path, variant.uid, arch)
if create_dir and not relative:
makedirs(path)
return path
def tree_dir(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/os
compose/Server-optional/x86_64/os
"""
if arch == "src":
arch = "source"
if arch == "source":
tree_dir = "tree"
else:
# use 'os' dir due to historical reasons
tree_dir = "os"
path = os.path.join(self.topdir(arch, variant, create_dir=create_dir, relative=relative), tree_dir)
if create_dir and not relative:
makedirs(path)
return path
def os_tree(self, arch, variant, create_dir=True, relative=False):
return self.tree_dir(arch, variant, create_dir=create_dir, relative=relative)
def repository(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/os
compose/Server/x86_64/addons/LoadBalancer
"""
if variant.type == "addon":
path = self.packages(arch, variant, create_dir=create_dir, relative=relative)
else:
path = self.tree_dir(arch, variant, create_dir=create_dir, relative=relative)
if create_dir and not relative:
makedirs(path)
return path
def packages(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/os/Packages
compose/Server/x86_64/os/addons/LoadBalancer
compose/Server-optional/x86_64/os/Packages
"""
if variant.type == "addon":
path = os.path.join(self.tree_dir(arch, variant, create_dir=create_dir, relative=relative), "addons", variant.id)
else:
path = os.path.join(self.tree_dir(arch, variant, create_dir=create_dir, relative=relative), "Packages")
if create_dir and not relative:
makedirs(path)
return path
def debug_topdir(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/debug
compose/Server-optional/x86_64/debug
"""
path = os.path.join(self.topdir(arch, variant, create_dir=create_dir, relative=relative), "debug")
if create_dir and not relative:
makedirs(path)
return path
def debug_tree(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/debug/tree
compose/Server-optional/x86_64/debug/tree
"""
path = os.path.join(self.debug_topdir(arch, variant, create_dir=create_dir, relative=relative), "tree")
if create_dir and not relative:
makedirs(path)
return path
def debug_packages(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/debug/tree/Packages
compose/Server/x86_64/debug/tree/addons/LoadBalancer
compose/Server-optional/x86_64/debug/tree/Packages
"""
if arch in ("source", "src"):
return None
if variant.type == "addon":
path = os.path.join(self.debug_tree(arch, variant, create_dir=create_dir, relative=relative), "addons", variant.id)
else:
path = os.path.join(self.debug_tree(arch, variant, create_dir=create_dir, relative=relative), "Packages")
if create_dir and not relative:
makedirs(path)
return path
def debug_repository(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/debug/tree
compose/Server/x86_64/debug/tree/addons/LoadBalancer
compose/Server-optional/x86_64/debug/tree
"""
if arch in ("source", "src"):
return None
if variant.type == "addon":
path = os.path.join(self.debug_tree(arch, variant, create_dir=create_dir, relative=relative), "addons", variant.id)
else:
path = self.debug_tree(arch, variant, create_dir=create_dir, relative=relative)
if create_dir and not relative:
makedirs(path)
return path
def iso_dir(self, arch, variant, symlink_to=None, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/iso
None
"""
if variant.type == "addon":
return None
if variant.type == "optional":
if not self.compose.conf["create_optional_isos"]:
return None
if arch == "src":
arch = "source"
path = os.path.join(self.topdir(arch, variant, create_dir=create_dir, relative=relative), "iso")
if symlink_to:
# TODO: create_dir
topdir = self.compose.topdir.rstrip("/") + "/"
relative_dir = path[len(topdir):]
target_dir = os.path.join(symlink_to, self.compose.compose_id, relative_dir)
if create_dir and not relative:
makedirs(target_dir)
try:
os.symlink(target_dir, path)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
msg = "Symlink pointing to '%s' expected: %s" % (target_dir, path)
if not os.path.islink(path):
raise RuntimeError(msg)
if os.path.abspath(os.readlink(path)) != target_dir:
raise RuntimeError(msg)
else:
if create_dir and not relative:
makedirs(path)
return path
def iso_path(self, arch, variant, disc_type="dvd", disc_num=1, suffix=".iso", symlink_to=None, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/iso/rhel-7.0-20120127.0-Server-x86_64-dvd1.iso
None
"""
if arch == "src":
arch = "source"
if disc_type not in ("cd", "dvd", "ec2", "live", "boot"):
raise RuntimeError("Unsupported disc type: %s" % disc_type)
if disc_num:
disc_num = int(disc_num)
else:
disc_num = ""
path = self.iso_dir(arch, variant, symlink_to=symlink_to, create_dir=create_dir, relative=relative)
if path is None:
return None
compose_id = self.compose.ci_base[variant.uid].compose_id
if variant.type == "layered-product":
variant_uid = variant.parent.uid
else:
variant_uid = variant.uid
file_name = "%s-%s-%s-%s%s%s" % (compose_id, variant_uid, arch, disc_type, disc_num, suffix)
result = os.path.join(path, file_name)
return result
def jigdo_dir(self, arch, variant, create_dir=True, relative=False):
"""
Examples:
compose/Server/x86_64/jigdo
None
"""
if variant.type == "addon":
return None
if variant.type == "optional":
if not self.compose.conf["create_optional_isos"]:
return None
if arch == "src":
arch = "source"
path = os.path.join(self.topdir(arch, variant, create_dir=create_dir, relative=relative), "jigdo")
if create_dir and not relative:
makedirs(path)
return path
def metadata(self, file_name=None, create_dir=True, relative=False):
"""
Examples:
compose/metadata
compose/metadata/rpms.json
"""
path = os.path.join(self.topdir(create_dir=create_dir, relative=relative), "metadata")
if create_dir and not relative:
makedirs(path)
if file_name:
path = os.path.join(path, file_name)
return path
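# A hedged usage sketch (not part of the original file): how the path
# helpers above are expected to compose, assuming a configured `compose`
# object and a variant with uid "Server"; paths are illustrative only.
#
#     compose.paths.compose.tree_dir("x86_64", variant)
#     # -> .../compose/Server/x86_64/os
#     compose.paths.compose.packages("x86_64", variant)
#     # -> .../compose/Server/x86_64/os/Packages
#     compose.paths.compose.iso_path("x86_64", variant, disc_type="dvd", disc_num=1)
#     # -> .../compose/Server/x86_64/iso/<compose_id>-Server-x86_64-dvd1.iso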

pungi/phases/__init__.py Normal file

@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# phases in runtime order
from init import InitPhase # noqa
from pkgset import PkgsetPhase # noqa
from gather import GatherPhase # noqa
from createrepo import CreaterepoPhase # noqa
from product_img import ProductimgPhase # noqa
from buildinstall import BuildinstallPhase # noqa
from extra_files import ExtraFilesPhase # noqa
from createiso import CreateisoPhase # noqa
from live_images import LiveImagesPhase # noqa
from test import TestPhase # noqa
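# A hedged sketch (assumed, not the original bin/pungi driver) of how these
# phases could be driven in the runtime order above, using the
# validate()/start()/stop() API from pungi/phases/base.py:
#
#     phases = [InitPhase(compose), PkgsetPhase(compose)]  # etc., in order
#     for phase in phases:
#         phase.validate()
#     for phase in phases:
#         phase.start()  # logs [SKIP]/[BEGIN] and calls run()
#         phase.stop()   # waits for any worker pool, logs [DONE]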

pungi/phases/base.py Normal file

@@ -0,0 +1,73 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from pypungi.checks import validate_options
class PhaseBase(object):
config_options = ()
def __init__(self, compose):
self.compose = compose
self.msg = "---------- PHASE: %s ----------" % self.name.upper()
self.finished = False
self._skipped = False
def validate(self):
errors = validate_options(self.compose.conf, self.config_options)
if errors:
raise ValueError("\n".join(errors))
def conf_assert_str(self, name):
missing = []
invalid = []
if name not in self.compose.conf:
missing.append(name)
elif not isinstance(self.compose.conf[name], str):
invalid.append((name, type(self.compose.conf[name]), str))
return missing, invalid
def skip(self):
if self._skipped:
return True
if self.compose.just_phases and self.name not in self.compose.just_phases:
return True
if self.name in self.compose.skip_phases:
return True
if self.name in self.compose.conf.get("skip_phases", []):
return True
return False
def start(self):
self._skipped = self.skip()
if self._skipped:
self.compose.log_warning("[SKIP ] %s" % self.msg)
self.finished = True
return
self.compose.log_info("[BEGIN] %s" % self.msg)
self.run()
def stop(self):
if self.finished:
return
if hasattr(self, "pool"):
self.pool.stop()
self.finished = True
self.compose.log_info("[DONE ] %s" % self.msg)
def run(self):
raise NotImplementedError
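# A minimal example subclass (an illustration, not part of this commit);
# the phase name and log message are made up:
#
#     class HelloPhase(PhaseBase):
#         name = "hello"
#
#         def run(self):
#             # start() has already logged "[BEGIN] ---------- PHASE: HELLO ----------"
#             self.compose.log_info("hello from a custom phase")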

pungi/phases/buildinstall.py Normal file

@@ -0,0 +1,360 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import errno
import os
import time
import pipes
import tempfile
import shutil
import re
from kobo.threads import ThreadPool, WorkerThread
from kobo.shortcuts import run, read_checksum_file, relative_path
from productmd.imagemanifest import Image
from pypungi.util import get_buildroot_rpms, get_volid
from pypungi.wrappers.lorax import LoraxWrapper
from pypungi.wrappers.kojiwrapper import KojiWrapper
from pypungi.wrappers.iso import IsoWrapper
from pypungi.wrappers.scm import get_file_from_scm
from pypungi.phases.base import PhaseBase
class BuildinstallPhase(PhaseBase):
name = "buildinstall"
config_options = (
{
"name": "bootable",
"expected_types": [bool],
"expected_values": [True],
},
{
"name": "buildinstall_method",
"extected_types": [str],
"expected_values": ["lorax", "buildinstall"],
"requires": (
(lambda x: bool(x) is True, ["bootable"]),
),
},
{
"name": "buildinstall_upgrade_image",
"expected_types": [bool],
"optional": True,
},
{
"name": "buildinstall_kickstart",
"expected_types": [str],
"optional": True,
},
)
def __init__(self, compose):
PhaseBase.__init__(self, compose)
self.pool = ThreadPool(logger=self.compose._logger)
def skip(self):
if PhaseBase.skip(self):
return True
if not self.compose.conf.get("bootable"):
msg = "Not a bootable product. Skipping buildinstall."
self.compose.log_debug(msg)
return True
return False
def run(self):
lorax = LoraxWrapper()
product = self.compose.conf["product_name"]
version = self.compose.conf["product_version"]
release = self.compose.conf["product_version"]
noupgrade = not self.compose.conf.get("buildinstall_upgrade_image", False)
buildinstall_method = self.compose.conf["buildinstall_method"]
for arch in self.compose.get_arches():
repo_baseurl = self.compose.paths.work.arch_repo(arch)
output_dir = self.compose.paths.work.buildinstall_dir(arch)
volid = get_volid(self.compose, arch)
if buildinstall_method == "lorax":
cmd = lorax.get_lorax_cmd(product, version, release, repo_baseurl, output_dir, is_final=self.compose.supported, buildarch=arch, volid=volid, nomacboot=True, noupgrade=noupgrade)
elif buildinstall_method == "buildinstall":
cmd = lorax.get_buildinstall_cmd(product, version, release, repo_baseurl, output_dir, is_final=self.compose.supported, buildarch=arch, volid=volid)
else:
raise ValueError("Unsupported buildinstall method: %s" % buildinstall_method)
self.pool.add(BuildinstallThread(self.pool))
self.pool.queue_put((self.compose, arch, cmd))
self.pool.start()
def copy_files(self):
# copy buildinstall files to the 'os' dir
kickstart_file = get_kickstart_file(self.compose)
for arch in self.compose.get_arches():
for variant in self.compose.get_variants(arch=arch, types=["self", "variant"]):
buildinstall_dir = self.compose.paths.work.buildinstall_dir(arch)
if not os.path.isdir(buildinstall_dir) or not os.listdir(buildinstall_dir):
continue
os_tree = self.compose.paths.compose.os_tree(arch, variant)
# TODO: label is not used
label = ""
volid = get_volid(self.compose, arch, variant, escape_spaces=False)
tweak_buildinstall(buildinstall_dir, os_tree, arch, variant.uid, label, volid, kickstart_file)
symlink_boot_iso(self.compose, arch, variant)
def get_kickstart_file(compose):
scm_dict = compose.conf.get("buildinstall_kickstart", None)
if not scm_dict:
compose.log_debug("Path to ks.cfg (buildinstall_kickstart) not specified.")
return
msg = "Getting ks.cfg"
kickstart_path = os.path.join(compose.paths.work.topdir(arch="global"), "ks.cfg")
if os.path.exists(kickstart_path):
compose.log_warning("[SKIP ] %s" % msg)
return kickstart_path
compose.log_info("[BEGIN] %s" % msg)
if isinstance(scm_dict, dict):
kickstart_name = os.path.basename(scm_dict["file"])
if scm_dict["scm"] == "file":
scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"])
else:
kickstart_name = os.path.basename(scm_dict)
scm_dict = os.path.join(compose.config_dir, scm_dict)
tmp_dir = tempfile.mkdtemp(prefix="buildinstall_kickstart_")
get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
src = os.path.join(tmp_dir, kickstart_name)
shutil.copy2(src, kickstart_path)
compose.log_info("[DONE ] %s" % msg)
return kickstart_path
# HACK: this is a hack!
# * it's quite trivial to replace volids
# * it's not easy to replace menu titles
# * we probably need to get this into lorax
def tweak_buildinstall(src, dst, arch, variant, label, volid, kickstart_file=None):
volid_escaped = volid.replace(" ", r"\x20").replace("\\", "\\\\")
volid_escaped_2 = volid_escaped.replace("\\", "\\\\")
tmp_dir = tempfile.mkdtemp(prefix="tweak_buildinstall_")
# verify src
if not os.path.isdir(src):
raise OSError(errno.ENOENT, "Directory does not exist: %s" % src)
# create dst
try:
os.makedirs(dst)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
# copy src to temp
# TODO: place temp on the same device as buildinstall dir so we can hardlink
cmd = "cp -av --remove-destination %s/* %s/" % (pipes.quote(src), pipes.quote(tmp_dir))
run(cmd)
# tweak configs
configs = [
"isolinux/isolinux.cfg",
"etc/yaboot.conf",
"ppc/ppc64/yaboot.conf",
"EFI/BOOT/BOOTX64.conf",
"EFI/BOOT/grub.cfg",
]
for config in configs:
config_path = os.path.join(tmp_dir, config)
if not os.path.exists(config_path):
continue
data = open(config_path, "r").read()
os.unlink(config_path)  # break hardlink by removing the file and writing a new one
new_volid = volid_escaped
if "yaboot" in config:
# double-escape volid in yaboot.conf
new_volid = volid_escaped_2
ks = ""
if kickstart_file:
shutil.copy2(kickstart_file, os.path.join(dst, "ks.cfg"))
ks = " ks=hd:LABEL=%s:/ks.cfg" % new_volid
# pre-f18
data = re.sub(r":CDLABEL=[^ \n]*", r":CDLABEL=%s%s" % (new_volid, ks), data)
# f18+
data = re.sub(r":LABEL=[^ \n]*", r":LABEL=%s%s" % (new_volid, ks), data)
data = re.sub(r"(search .* -l) '[^'\n]*'", r"\1 '%s'" % volid, data)
open(config_path, "w").write(data)
images = [
os.path.join(tmp_dir, "images", "efiboot.img"),
]
for image in images:
if not os.path.isfile(image):
continue
mount_tmp_dir = tempfile.mkdtemp(prefix="tweak_buildinstall")
cmd = ["mount", "-o", "loop", image, mount_tmp_dir]
run(cmd)
for config in configs:
config_path = os.path.join(tmp_dir, config)
config_in_image = os.path.join(mount_tmp_dir, config)
if os.path.isfile(config_in_image):
cmd = ["cp", "-v", "--remove-destination", config_path, config_in_image]
run(cmd)
cmd = ["umount", mount_tmp_dir]
run(cmd)
shutil.rmtree(mount_tmp_dir)
# HACK: make buildinstall files world readable
run("chmod -R a+rX %s" % pipes.quote(tmp_dir))
# copy temp to dst
cmd = "cp -av --remove-destination %s/* %s/" % (pipes.quote(tmp_dir), pipes.quote(dst))
run(cmd)
shutil.rmtree(tmp_dir)
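# A self-contained illustration (hypothetical volid and config line) of the
# LABEL substitution applied above:
#
#     import re
#     volid = "RHEL-7.0-Server-x86_64"
#     line = "append initrd=initrd.img inst.stage2=hd:LABEL=OLD-LABEL quiet"
#     re.sub(r":LABEL=[^ \n]*", ":LABEL=%s" % volid, line)
#     # -> 'append initrd=initrd.img inst.stage2=hd:LABEL=RHEL-7.0-Server-x86_64 quiet'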
def symlink_boot_iso(compose, arch, variant):
if arch == "src":
return
symlink_isos_to = compose.conf.get("symlink_isos_to", None)
os_tree = compose.paths.compose.os_tree(arch, variant)
# TODO: find in treeinfo?
boot_iso_path = os.path.join(os_tree, "images", "boot.iso")
if not os.path.isfile(boot_iso_path):
return
msg = "Symlinking boot.iso (arch: %s, variant: %s)" % (arch, variant)
new_boot_iso_path = compose.paths.compose.iso_path(arch, variant, disc_type="boot", disc_num=None, suffix=".iso", symlink_to=symlink_isos_to)
new_boot_iso_relative_path = compose.paths.compose.iso_path(arch, variant, disc_type="boot", disc_num=None, suffix=".iso", relative=True)
if os.path.exists(new_boot_iso_path):
# TODO: log
compose.log_warning("[SKIP ] %s" % msg)
return
compose.log_info("[BEGIN] %s" % msg)
# can't make a hardlink - possible cross-device link due to 'symlink_to' argument
symlink_target = relative_path(boot_iso_path, new_boot_iso_path)
os.symlink(symlink_target, new_boot_iso_path)
iso = IsoWrapper()
implant_md5 = iso.get_implanted_md5(new_boot_iso_path)
# compute md5sum, sha1sum, sha256sum
iso_name = os.path.basename(new_boot_iso_path)
iso_dir = os.path.dirname(new_boot_iso_path)
for cmd in iso.get_checksum_cmds(iso_name):
run(cmd, workdir=iso_dir)
# create iso manifest
run(iso.get_manifest_cmd(iso_name), workdir=iso_dir)
img = Image(compose.im)
img.path = new_boot_iso_relative_path
img.mtime = int(os.stat(new_boot_iso_path).st_mtime)
img.size = os.path.getsize(new_boot_iso_path)
img.arch = arch
img.type = "boot"
img.format = "iso"
img.disc_number = 1
img.disc_count = 1
for checksum_type in ("md5", "sha1", "sha256"):
checksum_path = new_boot_iso_path + ".%sSUM" % checksum_type.upper()
checksum_value = None
if os.path.isfile(checksum_path):
checksum_value, iso_name = read_checksum_file(checksum_path)[0]
if iso_name != os.path.basename(img.path):
# a bit paranoid check - this should never happen
raise ValueError("Image name doesn't match checksum: %s" % checksum_path)
img.add_checksum(compose.paths.compose.topdir(), checksum_type=checksum_type, checksum_value=checksum_value)
img.bootable = True
img.implant_md5 = implant_md5
try:
img.volume_id = iso.get_volume_id(new_boot_iso_path)
except RuntimeError:
pass
compose.im.add(arch, variant.uid, img)
compose.log_info("[DONE ] %s" % msg)
class BuildinstallThread(WorkerThread):
def process(self, item, num):
compose, arch, cmd = item
runroot = compose.conf.get("runroot", False)
buildinstall_method = compose.conf["buildinstall_method"]
log_file = compose.paths.log.log_file(arch, "buildinstall")
msg = "Runnging buildinstall for arch %s" % arch
output_dir = compose.paths.work.buildinstall_dir(arch)
if os.path.isdir(output_dir):
if os.listdir(output_dir):
# output dir is *not* empty -> SKIP
self.pool.log_warning("[SKIP ] %s" % msg)
return
else:
# output dir is empty -> remove it and run buildinstall
self.pool.log_debug("Removing existing (but empty) buildinstall dir: %s" % output_dir)
os.rmdir(output_dir)
self.pool.log_info("[BEGIN] %s" % msg)
task_id = None
if runroot:
# run in a koji build root
# glibc32 is needed by yaboot on ppc64
packages = ["glibc32", "strace"]
if buildinstall_method == "lorax":
packages += ["lorax"]
elif buildinstall_method == "buildinstall":
packages += ["anaconda"]
runroot_channel = compose.conf.get("runroot_channel", None)
runroot_tag = compose.conf["runroot_tag"]
koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
koji_cmd = koji_wrapper.get_runroot_cmd(runroot_tag, arch, cmd, channel=runroot_channel, use_shell=True, task_id=True, packages=packages, mounts=[compose.topdir])
# avoid race conditions?
# Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
time.sleep(num * 3)
output = koji_wrapper.run_runroot_cmd(koji_cmd, log_file=log_file)
task_id = int(output["task_id"])
if output["retcode"] != 0:
raise RuntimeError("Runroot task failed: %s. See %s for more details." % (output["task_id"], log_file))
else:
# run locally
run(cmd, show_cmd=True, logfile=log_file)
log_file = compose.paths.log.log_file(arch, "buildinstall-RPMs")
rpms = get_buildroot_rpms(compose, task_id)
open(log_file, "w").write("\n".join(rpms))
self.pool.log_info("[DONE ] %s" % msg)

pungi/phases/createiso.py Normal file

@@ -0,0 +1,421 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import time
import pipes
import random
import shutil
import koji
import productmd.treeinfo
from productmd.imagemanifest import Image
from kobo.threads import ThreadPool, WorkerThread
from kobo.shortcuts import run, read_checksum_file, relative_path
from pypungi.wrappers.iso import IsoWrapper
from pypungi.wrappers.createrepo import CreaterepoWrapper
from pypungi.wrappers.kojiwrapper import KojiWrapper
from pypungi.wrappers.jigdo import JigdoWrapper
from pypungi.phases.base import PhaseBase
from pypungi.util import makedirs, get_volid
from pypungi.media_split import MediaSplitter
from pypungi.compose_metadata.discinfo import read_discinfo, write_discinfo
class CreateisoPhase(PhaseBase):
name = "createiso"
def __init__(self, compose):
PhaseBase.__init__(self, compose)
self.pool = ThreadPool(logger=self.compose._logger)
def run(self):
iso = IsoWrapper(logger=self.compose._logger)
symlink_isos_to = self.compose.conf.get("symlink_isos_to", None)
commands = []
for variant in self.compose.get_variants(types=["variant", "layered-product", "optional"], recursive=True):
for arch in variant.arches + ["src"]:
volid = get_volid(self.compose, arch, variant)
os_tree = self.compose.paths.compose.os_tree(arch, variant)
iso_dir = self.compose.paths.compose.iso_dir(arch, variant, symlink_to=symlink_isos_to)
if not iso_dir:
continue
found = False
for root, dirs, files in os.walk(os_tree):
if found:
break
for fn in files:
if fn.endswith(".rpm"):
found = True
break
if not found:
self.compose.log_warning("No RPMs found for %s.%s, skipping ISO" % (variant, arch))
continue
split_iso_data = split_iso(self.compose, arch, variant)
disc_count = len(split_iso_data)
for disc_num, iso_data in enumerate(split_iso_data):
disc_num += 1
# XXX: hardcoded disc_type
iso_path = self.compose.paths.compose.iso_path(arch, variant, disc_type="dvd", disc_num=disc_num, symlink_to=symlink_isos_to)
relative_iso_path = self.compose.paths.compose.iso_path(arch, variant, disc_type="dvd", disc_num=disc_num, create_dir=False, relative=True)
if os.path.isfile(iso_path):
self.compose.log_warning("Skipping mkisofs, image already exists: %s" % iso_path)
continue
iso_name = os.path.basename(iso_path)
graft_points = prepare_iso(self.compose, arch, variant, disc_num=disc_num, disc_count=disc_count, split_iso_data=iso_data)
bootable = self.compose.conf.get("bootable", False)
if arch == "src":
bootable = False
if variant.type != "variant":
bootable = False
cmd = {
"arch": arch,
"variant": variant,
"iso_path": iso_path,
"relative_iso_path": relative_iso_path,
"build_arch": arch,
"bootable": bootable,
"cmd": [],
"label": "", # currently not used
"disc_num": disc_num,
"disc_count": disc_count,
}
if os.path.islink(iso_dir):
cmd["mount"] = os.path.abspath(os.path.join(os.path.dirname(iso_dir), os.readlink(iso_dir)))
chdir_cmd = "cd %s" % pipes.quote(iso_dir)
cmd["cmd"].append(chdir_cmd)
mkisofs_kwargs = {}
if bootable:
buildinstall_method = self.compose.conf["buildinstall_method"]
if buildinstall_method == "lorax":
# TODO: $arch instead of ppc
mkisofs_kwargs["boot_args"] = iso.get_boot_options(arch, "/usr/share/lorax/config_files/ppc")
elif buildinstall_method == "buildinstall":
mkisofs_kwargs["boot_args"] = iso.get_boot_options(arch, "/usr/lib/anaconda-runtime/boot")
# ppc(64) doesn't seem to support utf-8
if arch in ("ppc", "ppc64", "ppc64le"):
mkisofs_kwargs["input_charset"] = None
mkisofs_cmd = iso.get_mkisofs_cmd(iso_name, None, volid=volid, exclude=["./lost+found"], graft_points=graft_points, **mkisofs_kwargs)
mkisofs_cmd = " ".join([pipes.quote(i) for i in mkisofs_cmd])
cmd["cmd"].append(mkisofs_cmd)
if bootable and arch == "x86_64":
isohybrid_cmd = "isohybrid --uefi %s" % pipes.quote(iso_name)
cmd["cmd"].append(isohybrid_cmd)
elif bootable and arch == "i386":
isohybrid_cmd = "isohybrid %s" % pipes.quote(iso_name)
cmd["cmd"].append(isohybrid_cmd)
# implant MD5SUM to iso
isomd5sum_cmd = iso.get_implantisomd5_cmd(iso_name, self.compose.supported)
isomd5sum_cmd = " ".join([pipes.quote(i) for i in isomd5sum_cmd])
cmd["cmd"].append(isomd5sum_cmd)
# compute md5sum, sha1sum, sha256sum
cmd["cmd"].extend(iso.get_checksum_cmds(iso_name))
# create iso manifest
cmd["cmd"].append(iso.get_manifest_cmd(iso_name))
# create jigdo
jigdo = JigdoWrapper(logger=self.compose._logger)
jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)
files = [
{
"path": os_tree,
"label": None,
"uri": None,
}
]
jigdo_cmd = jigdo.get_jigdo_cmd(iso_path, files, output_dir=jigdo_dir, no_servers=True, report="noprogress")
jigdo_cmd = " ".join([pipes.quote(i) for i in jigdo_cmd])
cmd["cmd"].append(jigdo_cmd)
cmd["cmd"] = " && ".join(cmd["cmd"])
commands.append(cmd)
for cmd in commands:
self.pool.add(CreateIsoThread(self.pool))
self.pool.queue_put((self.compose, cmd))
self.pool.start()
def stop(self, *args, **kwargs):
PhaseBase.stop(self, *args, **kwargs)
if self.skip():
return
class CreateIsoThread(WorkerThread):
def fail(self, compose, cmd):
compose.log_error("CreateISO failed, removing ISO: %s" % cmd["iso_path"])
try:
# remove incomplete ISO
os.unlink(cmd["iso_path"])
# TODO: remove jigdo & template & checksums
except OSError:
pass
def process(self, item, num):
compose, cmd = item
mounts = [compose.topdir]
if "mount" in cmd:
mounts.append(cmd["mount"])
runroot = compose.conf.get("runroot", False)
bootable = compose.conf.get("bootable", False)
log_file = compose.paths.log.log_file(cmd["arch"], "createiso-%s" % os.path.basename(cmd["iso_path"]))
msg = "Creating ISO (arch: %s, variant: %s): %s" % (cmd["arch"], cmd["variant"], os.path.basename(cmd["iso_path"]))
self.pool.log_info("[BEGIN] %s" % msg)
if runroot:
# run in a koji build root
packages = ["coreutils", "genisoimage", "isomd5sum", "jigdo", "strace", "lsof"]
if bootable:
buildinstall_method = compose.conf["buildinstall_method"]
if buildinstall_method == "lorax":
packages += ["lorax"]
elif buildinstall_method == "buildinstall":
packages += ["anaconda"]
runroot_channel = compose.conf.get("runroot_channel", None)
runroot_tag = compose.conf["runroot_tag"]
# get info about build arches in buildroot_tag
koji_url = compose.conf["pkgset_koji_url"]
koji_proxy = koji.ClientSession(koji_url)
tag_info = koji_proxy.getTag(runroot_tag)
tag_arches = tag_info["arches"].split(" ")
if not cmd["bootable"]:
if "x86_64" in tag_arches:
# assign non-bootable images to x86_64 if possible
cmd["build_arch"] = "x86_64"
elif cmd["build_arch"] == "src":
# pick random arch from available runroot tag arches
cmd["build_arch"] = random.choice(tag_arches)
koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
koji_cmd = koji_wrapper.get_runroot_cmd(runroot_tag, cmd["build_arch"], cmd["cmd"], channel=runroot_channel, use_shell=True, task_id=True, packages=packages, mounts=mounts)
# avoid race conditions?
# Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
time.sleep(num * 3)
output = koji_wrapper.run_runroot_cmd(koji_cmd, log_file=log_file)
if output["retcode"] != 0:
self.fail(compose, cmd)
raise RuntimeError("Runroot task failed: %s. See %s for more details." % (output["task_id"], log_file))
else:
# run locally
try:
run(cmd["cmd"], show_cmd=True, logfile=log_file)
except:
self.fail(compose, cmd)
raise
iso = IsoWrapper()
img = Image(compose.im)
img.path = cmd["relative_iso_path"]
img.mtime = int(os.stat(cmd["iso_path"]).st_mtime)
img.size = os.path.getsize(cmd["iso_path"])
img.arch = cmd["arch"]
# XXX: HARDCODED
img.type = "dvd"
img.format = "iso"
img.disc_number = cmd["disc_num"]
img.disc_count = cmd["disc_count"]
for checksum_type in ("md5", "sha1", "sha256"):
checksum_path = cmd["iso_path"] + ".%sSUM" % checksum_type.upper()
checksum_value = None
if os.path.isfile(checksum_path):
checksum_value, iso_name = read_checksum_file(checksum_path)[0]
if iso_name != os.path.basename(img.path):
# a bit paranoid check - this should never happen
raise ValueError("Image name doesn't match checksum: %s" % checksum_path)
img.add_checksum(compose.paths.compose.topdir(), checksum_type=checksum_type, checksum_value=checksum_value)
img.bootable = cmd["bootable"]
img.implant_md5 = iso.get_implanted_md5(cmd["iso_path"])
try:
img.volume_id = iso.get_volume_id(cmd["iso_path"])
except RuntimeError:
pass
compose.im.add(cmd["arch"], cmd["variant"].uid, img)
# TODO: supported_iso_bit
# add: boot.iso
self.pool.log_info("[DONE ] %s" % msg)
def split_iso(compose, arch, variant):
# XXX: hardcoded
media_size = 4700000000
media_reserve = 10 * 1024 * 1024
ms = MediaSplitter(str(media_size - media_reserve))
os_tree = compose.paths.compose.os_tree(arch, variant)
extra_files_dir = compose.paths.work.extra_files_dir(arch, variant)
# ti_path = os.path.join(os_tree, ".treeinfo")
# ti = productmd.treeinfo.TreeInfo()
# ti.load(ti_path)
# scan extra files to mark them "sticky" -> they'll be on all media after split
extra_files = set()
for root, dirs, files in os.walk(extra_files_dir):
for fn in files:
path = os.path.join(root, fn)
rel_path = relative_path(path, extra_files_dir.rstrip("/") + "/")
extra_files.add(rel_path)
packages = []
all_files = []
all_files_ignore = []
ti = productmd.treeinfo.TreeInfo()
ti.load(os.path.join(os_tree, ".treeinfo"))
boot_iso_rpath = ti.images.images.get(arch, {}).get("boot.iso", None)
if boot_iso_rpath:
all_files_ignore.append(boot_iso_rpath)
compose.log_debug("split_iso all_files_ignore = %s" % ", ".join(all_files_ignore))
for root, dirs, files in os.walk(os_tree):
for dn in dirs[:]:
repo_dir = os.path.join(root, dn)
if repo_dir == os.path.join(compose.paths.compose.repository(arch, variant), "repodata"):
dirs.remove(dn)
for fn in files:
path = os.path.join(root, fn)
rel_path = relative_path(path, os_tree.rstrip("/") + "/")
sticky = rel_path in extra_files
if rel_path in all_files_ignore:
compose.log_info("split_iso: Skipping %s" % rel_path)
continue
if root == compose.paths.compose.packages(arch, variant):
packages.append((path, os.path.getsize(path), sticky))
else:
all_files.append((path, os.path.getsize(path), sticky))
for path, size, sticky in all_files + packages:
ms.add_file(path, size, sticky)
return ms.split()
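# A sketch of the MediaSplitter contract as exercised above (file names and
# sizes are made up); sticky files end up on every disc after the split:
#
#     from pypungi.media_split import MediaSplitter
#     ms = MediaSplitter(str(4700000000 - 10 * 1024 * 1024))
#     ms.add_file("Packages/bash-4.2.45-5.el7.x86_64.rpm", 1048576, False)
#     ms.add_file("GPL", 18092, True)  # sticky -> carried on all media
#     discs = ms.split()               # one entry per disc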
def prepare_iso(compose, arch, variant, disc_num=1, disc_count=None, split_iso_data=None):
tree_dir = compose.paths.compose.os_tree(arch, variant)
iso_dir = compose.paths.work.iso_dir(arch, variant, disc_num=disc_num)
# modify treeinfo
ti_path = os.path.join(tree_dir, ".treeinfo")
ti = productmd.treeinfo.TreeInfo()
ti.load(ti_path)
ti.media.totaldiscs = disc_count or 1
ti.media.discnum = disc_num
# remove boot.iso from all sections
paths = set()
for platform in ti.images.images:
if "boot.iso" in ti.images.images[platform]:
paths.add(ti.images.images[platform].pop("boot.iso"))
# remove boot.iso from checksums
for i in paths:
if i in ti.checksums.checksums:
del ti.checksums.checksums[i]
# make a copy of isolinux/isolinux.bin, images/boot.img - they get modified when mkisofs is called
for i in ("isolinux/isolinux.bin", "images/boot.img"):
src_path = os.path.join(tree_dir, i)
dst_path = os.path.join(iso_dir, i)
if os.path.exists(src_path):
makedirs(os.path.dirname(dst_path))
shutil.copy2(src_path, dst_path)
if disc_count > 1:
# remove repodata/repomd.xml from checksums, create a new one later
if "repodata/repomd.xml" in ti.checksums.checksums:
del ti.checksums.checksums["repodata/repomd.xml"]
# rebuild repodata
createrepo_c = compose.conf.get("createrepo_c", False)
createrepo_checksum = compose.conf.get("createrepo_checksum", None)
repo = CreaterepoWrapper(createrepo_c=createrepo_c)
file_list = "%s-file-list" % iso_dir
packages_dir = compose.paths.compose.packages(arch, variant)
file_list_content = []
for i in split_iso_data["files"]:
if not i.endswith(".rpm"):
continue
if not i.startswith(packages_dir):
continue
rel_path = relative_path(i, tree_dir.rstrip("/") + "/")
file_list_content.append(rel_path)
if file_list_content:
# write modified repodata only if there are packages available
run("cp -a %s/repodata %s/" % (pipes.quote(tree_dir), pipes.quote(iso_dir)))
open(file_list, "w").write("\n".join(file_list_content))
cmd = repo.get_createrepo_cmd(tree_dir, update=True, database=True, skip_stat=True, pkglist=file_list, outputdir=iso_dir, workers=3, checksum=createrepo_checksum)
run(cmd)
# add repodata/repomd.xml back to checksums
ti.checksums.add(iso_dir, "repodata/repomd.xml")
new_ti_path = os.path.join(iso_dir, ".treeinfo")
ti.dump(new_ti_path)
# modify discinfo
di_path = os.path.join(tree_dir, ".discinfo")
data = read_discinfo(di_path)
data["disc_numbers"] = [disc_num]
new_di_path = os.path.join(iso_dir, ".discinfo")
write_discinfo(new_di_path, **data)
i = IsoWrapper()
if not disc_count or disc_count == 1:
data = i.get_graft_points([tree_dir, iso_dir])
else:
data = i.get_graft_points([i._paths_from_list(tree_dir, split_iso_data["files"]), iso_dir])
# TODO: /content /graft-points
gp = "%s-graft-points" % iso_dir
i.write_graft_points(gp, data, exclude=["*/lost+found", "*/boot.iso"])
return gp
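# For reference: the graft-points file written above maps paths on the ISO to
# paths on disk, one "iso/path=/disk/path" entry per line as consumed by
# mkisofs -graft-points; a hypothetical excerpt:
#
#     .discinfo=/path/to/work/x86_64/iso/dir/.discinfo
#     Packages/bash-4.2.45-5.el7.x86_64.rpm=/path/to/compose/Server/x86_64/os/Packages/bash-4.2.45-5.el7.x86_64.rpm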

pungi/phases/createrepo.py Normal file

@@ -0,0 +1,205 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__all__ = (
"create_variant_repo",
)
import os
import glob
import shutil
import tempfile
import threading
from kobo.threads import ThreadPool, WorkerThread
from kobo.shortcuts import run, relative_path
from pypungi.wrappers.scm import get_dir_from_scm
from pypungi.wrappers.createrepo import CreaterepoWrapper
from pypungi.phases.base import PhaseBase
createrepo_lock = threading.Lock()
createrepo_dirs = set()
class CreaterepoPhase(PhaseBase):
name = "createrepo"
config_options = (
{
"name": "createrepo_c",
"expected_types": [bool],
"optional": True,
},
{
"name": "createrepo_checksum",
"expected_types": [bool],
"optional": True,
},
{
"name": "product_id",
"expected_types": [dict],
"optional": True,
},
{
"name": "product_id_allow_missing",
"expected_types": [bool],
"optional": True,
},
)
def __init__(self, compose):
PhaseBase.__init__(self, compose)
self.pool = ThreadPool(logger=self.compose._logger)
def run(self):
get_productids_from_scm(self.compose)
for i in range(3):
self.pool.add(CreaterepoThread(self.pool))
for arch in self.compose.get_arches():
for variant in self.compose.get_variants(arch=arch):
self.pool.queue_put((self.compose, arch, variant, "rpm"))
self.pool.queue_put((self.compose, arch, variant, "debuginfo"))
for variant in self.compose.get_variants():
self.pool.queue_put((self.compose, None, variant, "srpm"))
self.pool.start()
def create_variant_repo(compose, arch, variant, pkg_type):
createrepo_c = compose.conf.get("createrepo_c", False)
createrepo_checksum = compose.conf.get("createrepo_checksum", None)
repo = CreaterepoWrapper(createrepo_c=createrepo_c)
if pkg_type == "srpm":
repo_dir_arch = compose.paths.work.arch_repo(arch="global")
else:
repo_dir_arch = compose.paths.work.arch_repo(arch=arch)
if pkg_type == "rpm":
repo_dir = compose.paths.compose.repository(arch=arch, variant=variant)
package_dir = compose.paths.compose.packages(arch, variant)
elif pkg_type == "srpm":
repo_dir = compose.paths.compose.repository(arch="src", variant=variant)
package_dir = compose.paths.compose.packages("src", variant)
elif pkg_type == "debuginfo":
repo_dir = compose.paths.compose.debug_repository(arch=arch, variant=variant)
package_dir = compose.paths.compose.debug_packages(arch, variant)
else:
raise ValueError("Unknown package type: %s" % pkg_type)
if not repo_dir:
return
msg = "Creating repo (arch: %s, variant: %s): %s" % (arch, variant, repo_dir)
# HACK: using global lock
createrepo_lock.acquire()
if repo_dir in createrepo_dirs:
compose.log_warning("[SKIP ] Already in progress: %s" % msg)
createrepo_lock.release()
return
createrepo_dirs.add(repo_dir)
createrepo_lock.release()
if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")):
compose.log_warning("[SKIP ] %s" % msg)
return
compose.log_info("[BEGIN] %s" % msg)
file_list = None
if repo_dir != package_dir:
rel_dir = relative_path(package_dir.rstrip("/") + "/", repo_dir.rstrip("/") + "/")
file_list = compose.paths.work.repo_package_list(arch, variant, pkg_type)
f = open(file_list, "w")
for i in os.listdir(package_dir):
if i.endswith(".rpm"):
f.write("%s\n" % os.path.join(rel_dir, i))
f.close()
comps_path = None
if compose.has_comps and pkg_type == "rpm":
comps_path = compose.paths.work.comps(arch=arch, variant=variant)
cmd = repo.get_createrepo_cmd(repo_dir, update=True, database=True, skip_stat=True, pkglist=file_list, outputdir=repo_dir, workers=3, groupfile=comps_path, update_md_path=repo_dir_arch, checksum=createrepo_checksum)
# cmd.append("-vvv")
log_file = compose.paths.log.log_file(arch, "createrepo-%s" % variant)
run(cmd, logfile=log_file, show_cmd=True)
# call modifyrepo to inject productid
product_id = compose.conf.get("product_id")
if product_id and pkg_type == "rpm":
# add product certificate to base (rpm) repo; skip source and debug
product_id_path = compose.paths.work.product_id(arch, variant)
if os.path.isfile(product_id_path):
cmd = repo.get_modifyrepo_cmd(os.path.join(repo_dir, "repodata"), product_id_path, compress_type="gz")
log_file = compose.paths.log.log_file(arch, "modifyrepo-%s" % variant)
run(cmd, logfile=log_file, show_cmd=True)
# productinfo is not supported by modifyrepo in any way
# this is a HACK to make CDN happy (dmach: at least I think, need to confirm with dgregor)
shutil.copy2(product_id_path, os.path.join(repo_dir, "repodata", "productid"))
compose.log_info("[DONE ] %s" % msg)
class CreaterepoThread(WorkerThread):
def process(self, item, num):
compose, arch, variant, pkg_type = item
create_variant_repo(compose, arch, variant, pkg_type=pkg_type)
def get_productids_from_scm(compose):
# product_id is a scm_dict: {scm, repo, branch, dir}
# expected file name format: $variant_uid-$arch-*.pem
product_id = compose.conf.get("product_id")
if not product_id:
compose.log_info("No product certificates specified")
return
product_id_allow_missing = compose.conf.get("product_id_allow_missing", False)
msg = "Getting product certificates from SCM..."
compose.log_info("[BEGIN] %s" % msg)
tmp_dir = tempfile.mkdtemp(prefix="pungi_")
get_dir_from_scm(product_id, tmp_dir)
for arch in compose.get_arches():
for variant in compose.get_variants(arch=arch):
# some layered products may use base product name before variant
pem_files = glob.glob("%s/*%s-%s-*.pem" % (tmp_dir, variant.uid, arch))
# use for development:
# pem_files = glob.glob("%s/*.pem" % tmp_dir)[-1:]
if not pem_files:
msg = "No product certificate found (arch: %s, variant: %s)" % (arch, variant.uid)
if product_id_allow_missing:
compose.log_warning(msg)
continue
else:
shutil.rmtree(tmp_dir)
raise RuntimeError(msg)
if len(pem_files) > 1:
shutil.rmtree(tmp_dir)
raise RuntimeError("Multiple product certificates found (arch: %s, variant: %s): %s" % (arch, variant.uid, ", ".join(sorted([os.path.basename(i) for i in pem_files]))))
product_id_path = compose.paths.work.product_id(arch, variant)
shutil.copy2(pem_files[0], product_id_path)
shutil.rmtree(tmp_dir)
compose.log_info("[DONE ] %s" % msg)

pungi/phases/extra_files.py Normal file

@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import copy
import fnmatch
import pipes
from kobo.shortcuts import run
from pypungi.util import get_arch_variant_data, pkg_is_rpm
from pypungi.arch import split_name_arch
from pypungi.wrappers.scm import get_file_from_scm, get_dir_from_scm
from pypungi.phases.base import PhaseBase
class ExtraFilesPhase(PhaseBase):
"""EXTRA_FILES"""
name = "extra_files"
config_options = (
{
"name": "extra_files",
"expected_types": [list],
"optional": True
},
)
def __init__(self, compose, pkgset_phase):
PhaseBase.__init__(self, compose)
# pkgset_phase provides package_sets and path_prefix
self.pkgset_phase = pkgset_phase
def run(self):
for arch in self.compose.get_arches() + ["src"]:
for variant in self.compose.get_variants(arch=arch):
copy_extra_files(self.compose, arch, variant, self.pkgset_phase.package_sets)
def copy_extra_files(compose, arch, variant, package_sets):
if "extra_files" not in compose.conf:
return
var_dict = {
"arch": arch,
"variant_id": variant.id,
"variant_id_lower": variant.id.lower(),
"variant_uid": variant.uid,
"variant_uid_lower": variant.uid.lower(),
}
msg = "Getting extra files (arch: %s, variant: %s)" % (arch, variant)
# no skip (yet?)
compose.log_info("[BEGIN] %s" % msg)
os_tree = compose.paths.compose.os_tree(arch, variant)
extra_files_dir = compose.paths.work.extra_files_dir(arch, variant)
for scm_dict in get_arch_variant_data(compose.conf, "extra_files", arch, variant):
scm_dict = copy.deepcopy(scm_dict)
# if scm is "rpm" and repo contains a package name, find the package(s) in package set
if scm_dict["scm"] == "rpm" and not (scm_dict["repo"].startswith("/") or "://" in scm_dict["repo"]):
rpms = []
for pkgset_file in package_sets[arch]:
pkg_obj = package_sets[arch][pkgset_file]
if not pkg_is_rpm(pkg_obj):
continue
pkg_name, pkg_arch = split_name_arch(scm_dict["repo"] % var_dict)
if fnmatch.fnmatch(pkg_obj.name, pkg_name) and (pkg_arch is None or pkg_arch == pkg_obj.arch):
rpms.append(pkg_obj.file_path)
scm_dict["repo"] = rpms
if "file" in scm_dict:
get_file_from_scm(scm_dict, os.path.join(extra_files_dir, scm_dict.get("target", "").lstrip("/")), logger=compose._logger)
else:
get_dir_from_scm(scm_dict, os.path.join(extra_files_dir, scm_dict.get("target", "").lstrip("/")), logger=compose._logger)
if os.listdir(extra_files_dir):
cmd = "cp -av --remove-destination %s/* %s/" % (pipes.quote(extra_files_dir), pipes.quote(os_tree))
run(cmd)
compose.log_info("[DONE ] %s" % msg)

pungi/phases/gather/__init__.py Normal file

@@ -0,0 +1,515 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import tempfile
import shutil
import json
from kobo.rpmlib import parse_nvra
from productmd import RpmManifest
from pypungi.wrappers.scm import get_file_from_scm
from link import link_files
from pypungi.util import get_arch_variant_data, get_arch_data
from pypungi.phases.base import PhaseBase
from pypungi.arch import split_name_arch, get_compatible_arches
def get_gather_source(name):
import pypungi.phases.gather.sources
from source import GatherSourceContainer
GatherSourceContainer.register_module(pypungi.phases.gather.sources)
container = GatherSourceContainer()
return container["GatherSource%s" % name]
def get_gather_method(name):
import pypungi.phases.gather.methods
from method import GatherMethodContainer
GatherMethodContainer.register_module(pypungi.phases.gather.methods)
container = GatherMethodContainer()
return container["GatherMethod%s" % name]
class GatherPhase(PhaseBase):
"""GATHER"""
name = "gather"
config_options = (
{
"name": "multilib_arches",
"expected_types": [list],
"optional": True,
},
{
"name": "gather_lookaside_repos",
"expected_types": [list],
"optional": True,
},
{
"name": "multilib_methods",
"expected_types": [list],
},
{
"name": "greedy_method",
"expected_values": ["none", "all", "build"],
"optional": True,
},
{
"name": "gather_fulltree",
"expected_types": [bool],
"optional": True,
},
{
"name": "gather_prepopulate",
"expected_types": [str, dict],
"optional": True,
},
# DEPRECATED OPTIONS
{
"name": "additional_packages_multiarch",
"deprecated": True,
"comment": "Use multilib_whitelist instead",
},
{
"name": "filter_packages_multiarch",
"deprecated": True,
"comment": "Use multilib_blacklist instead",
},
)
def __init__(self, compose, pkgset_phase):
PhaseBase.__init__(self, compose)
# pkgset_phase provides package_sets and path_prefix
self.pkgset_phase = pkgset_phase
@staticmethod
def check_deps():
pass
def check_config(self):
errors = []
for i in ["product_name", "product_short", "product_version"]:
missing, invalid = self.conf_assert_str(i)
errors.extend(missing)
errors.extend(invalid)
if errors:
raise ValueError("Invalid config options: %s" % errors)
def run(self):
pkg_map = gather_wrapper(self.compose, self.pkgset_phase.package_sets, self.pkgset_phase.path_prefix)
manifest_file = self.compose.paths.compose.metadata("rpms.json")
manifest = RpmManifest()
manifest.compose.id = self.compose.compose_id
manifest.compose.type = self.compose.compose_type
manifest.compose.date = self.compose.compose_date
manifest.compose.respin = self.compose.compose_respin
for arch in self.compose.get_arches():
for variant in self.compose.get_variants(arch=arch):
link_files(self.compose, arch, variant, pkg_map[arch][variant.uid], self.pkgset_phase.package_sets, manifest=manifest)
self.compose.log_info("Writing RPM manifest: %s" % manifest_file)
manifest.dump(manifest_file)
def get_parent_pkgs(arch, variant, result_dict):
result = {
"rpm": set(),
"srpm": set(),
"debuginfo": set(),
}
if variant.parent is None:
return result
for pkg_type, pkgs in result_dict.get(arch, {}).get(variant.parent.uid, {}).iteritems():
for pkg in pkgs:
nvra = parse_nvra(pkg["path"])
result[pkg_type].add((nvra["name"], nvra["arch"]))
return result
def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None):
# multilib is per-arch, common for all variants
multilib_whitelist = get_multilib_whitelist(compose, arch)
multilib_blacklist = get_multilib_blacklist(compose, arch)
GatherMethod = get_gather_method(compose.conf["gather_method"])
msg = "Gathering packages (arch: %s, variant: %s)" % (arch, variant)
compose.log_info("[BEGIN] %s" % msg)
packages, groups, filter_packages = get_variant_packages(compose, arch, variant, package_sets)
prepopulate = get_prepopulate_packages(compose, arch, variant)
fulltree_excludes = fulltree_excludes or set()
method = GatherMethod(compose)
pkg_map = method(arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, package_sets, fulltree_excludes=fulltree_excludes, prepopulate=prepopulate)
compose.log_info("[DONE ] %s" % msg)
return pkg_map
def write_packages(compose, arch, variant, pkg_map, path_prefix):
msg = "Writing package list (arch: %s, variant: %s)" % (arch, variant)
compose.log_info("[BEGIN] %s" % msg)
for pkg_type, pkgs in pkg_map.iteritems():
file_name = compose.paths.work.package_list(arch=arch, variant=variant, pkg_type=pkg_type)
pkg_list = open(file_name, "w")
for pkg in pkgs:
# TODO: flags?
pkg_path = pkg["path"]
if pkg_path.startswith(path_prefix):
pkg_path = pkg_path[len(path_prefix):]
pkg_list.write("%s\n" % pkg_path)
pkg_list.close()
compose.log_info("[DONE ] %s" % msg)
def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs=None):
"""Remove parent variant's packages from pkg_map <-- it gets modified in this function"""
# TODO: remove debuginfo and srpm leftovers
if not variant.parent:
return
msg = "Trimming package list (arch: %s, variant: %s)" % (arch, variant)
compose.log_info("[BEGIN] %s" % msg)
remove_pkgs = remove_pkgs or {}
parent_pkgs = parent_pkgs or {}
addon_pkgs = {}
move_to_parent_pkgs = {}
removed_pkgs = {}
for pkg_type, pkgs in pkg_map.iteritems():
addon_pkgs.setdefault(pkg_type, set())
move_to_parent_pkgs.setdefault(pkg_type, [])
removed_pkgs.setdefault(pkg_type, [])
new_pkgs = []
for pkg in pkgs:
pkg_path = pkg["path"]
if not pkg_path:
continue
nvra = parse_nvra(pkg_path)
key = ((nvra["name"], nvra["arch"]))
if nvra["name"] in remove_pkgs.get(pkg_type, set()):
# TODO: make an option to turn this off
if variant.type == "layered-product" and pkg_type in ("srpm", "debuginfo"):
new_pkgs.append(pkg)
# User may not have addons available, therefore we need to
# keep addon SRPMs in layered products in order not to violate GPL.
# The same applies on debuginfo availability.
continue
compose.log_warning("Removed addon package (arch: %s, variant: %s): %s: %s" % (arch, variant, pkg_type, pkg_path))
removed_pkgs[pkg_type].append(pkg)
elif key not in parent_pkgs.get(pkg_type, set()):
if "input" in pkg["flags"]:
new_pkgs.append(pkg)
addon_pkgs[pkg_type].add(nvra["name"])
elif "fulltree-exclude" in pkg["flags"]:
# if a package wasn't explicitly included ('input') in an addon,
# move it to parent variant (cannot move it to optional, because addons can't depend on optional)
# this is a workaround for not having $addon-optional
move_to_parent_pkgs[pkg_type].append(pkg)
else:
new_pkgs.append(pkg)
addon_pkgs[pkg_type].add(nvra["name"])
else:
removed_pkgs[pkg_type].append(pkg)
pkgs[:] = new_pkgs
compose.log_info("Removed packages (arch: %s, variant: %s): %s: %s" % (arch, variant, pkg_type, len(removed_pkgs[pkg_type])))
compose.log_info("Moved to parent (arch: %s, variant: %s): %s: %s" % (arch, variant, pkg_type, len(move_to_parent_pkgs[pkg_type])))
compose.log_info("[DONE ] %s" % msg)
return addon_pkgs, move_to_parent_pkgs, removed_pkgs


def gather_wrapper(compose, package_sets, path_prefix):
    result = {}

    # gather packages: variants
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["variant"]):
            fulltree_excludes = set()
            pkg_map = gather_packages(compose, arch, variant, package_sets, fulltree_excludes=fulltree_excludes)
            result.setdefault(arch, {})[variant.uid] = pkg_map

    # gather packages: addons
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["addon"]):
            fulltree_excludes = set()
            for pkg_name, pkg_arch in get_parent_pkgs(arch, variant, result)["srpm"]:
                fulltree_excludes.add(pkg_name)
            pkg_map = gather_packages(compose, arch, variant, package_sets, fulltree_excludes=fulltree_excludes)
            result.setdefault(arch, {})[variant.uid] = pkg_map

    # gather packages: layered-products
    # NOTE: the same code as for addons
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["layered-product"]):
            fulltree_excludes = set()
            for pkg_name, pkg_arch in get_parent_pkgs(arch, variant, result)["srpm"]:
                fulltree_excludes.add(pkg_name)
            pkg_map = gather_packages(compose, arch, variant, package_sets, fulltree_excludes=fulltree_excludes)
            result.setdefault(arch, {})[variant.uid] = pkg_map

    # gather packages: optional
    # NOTE: the same code as for variants
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["optional"]):
            fulltree_excludes = set()
            pkg_map = gather_packages(compose, arch, variant, package_sets, fulltree_excludes=fulltree_excludes)
            result.setdefault(arch, {})[variant.uid] = pkg_map

    # trim packages: addons
    all_addon_pkgs = {}
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["addon"]):
            pkg_map = result[arch][variant.uid]
            parent_pkgs = get_parent_pkgs(arch, variant, result)
            addon_pkgs, move_to_parent_pkgs, removed_pkgs = trim_packages(compose, arch, variant, pkg_map, parent_pkgs)

            # update all_addon_pkgs
            for pkg_type, pkgs in addon_pkgs.iteritems():
                all_addon_pkgs.setdefault(pkg_type, set()).update(pkgs)

            # move packages to parent
            parent_pkg_map = result[arch][variant.parent.uid]
            for pkg_type, pkgs in move_to_parent_pkgs.iteritems():
                for pkg in pkgs:
                    compose.log_debug("Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s" % (arch, variant.uid, pkg_type, os.path.basename(pkg["path"])))
                    if pkg not in parent_pkg_map[pkg_type]:
                        parent_pkg_map[pkg_type].append(pkg)

    # trim packages: layered-products
    all_lp_pkgs = {}
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["layered-product"]):
            pkg_map = result[arch][variant.uid]
            parent_pkgs = get_parent_pkgs(arch, variant, result)
            lp_pkgs, move_to_parent_pkgs, removed_pkgs = trim_packages(compose, arch, variant, pkg_map, parent_pkgs, remove_pkgs=all_addon_pkgs)

            # update all_lp_pkgs
            for pkg_type, pkgs in lp_pkgs.iteritems():
                all_lp_pkgs.setdefault(pkg_type, set()).update(pkgs)

            # move packages to parent
            # XXX: do we really want this?
            parent_pkg_map = result[arch][variant.parent.uid]
            for pkg_type, pkgs in move_to_parent_pkgs.iteritems():
                for pkg in pkgs:
                    compose.log_debug("Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s" % (arch, variant.uid, pkg_type, os.path.basename(pkg["path"])))
                    if pkg not in parent_pkg_map[pkg_type]:
                        parent_pkg_map[pkg_type].append(pkg)

    # merge all_addon_pkgs with all_lp_pkgs
    for pkg_type in set(all_addon_pkgs.keys()) | set(all_lp_pkgs.keys()):
        all_addon_pkgs.setdefault(pkg_type, set()).update(all_lp_pkgs.get(pkg_type, set()))

    # trim packages: variants
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["variant"]):
            pkg_map = result[arch][variant.uid]
            addon_pkgs, move_to_parent_pkgs, removed_pkgs = trim_packages(compose, arch, variant, pkg_map, remove_pkgs=all_addon_pkgs)

    # trim packages: optional
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, types=["optional"]):
            pkg_map = result[arch][variant.uid]
            parent_pkgs = get_parent_pkgs(arch, variant, result)
            addon_pkgs, move_to_parent_pkgs, removed_pkgs = trim_packages(compose, arch, variant, pkg_map, parent_pkgs, remove_pkgs=all_addon_pkgs)

    # write packages (package lists) for all variants
    for arch in compose.get_arches():
        for variant in compose.get_variants(arch=arch, recursive=True):
            pkg_map = result[arch][variant.uid]
            write_packages(compose, arch, variant, pkg_map, path_prefix=path_prefix)

    return result
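
# A sketch of the mapping gather_wrapper() returns, inferred from how the
# gather methods build their results (paths are illustrative):
#
#   result[arch][variant.uid] = {
#       "rpm": [{"path": "/.../foo-1.0-1.fc21.x86_64.rpm", "flags": ["input"]}, ...],
#       "srpm": [...],
#       "debuginfo": [...],
#   }
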

def write_prepopulate_file(compose):
    if not compose.conf.get("gather_prepopulate", None):
        return

    prepopulate_file = os.path.join(compose.paths.work.topdir(arch="global"), "prepopulate.json")
    msg = "Writing prepopulate file: %s" % prepopulate_file

    if compose.DEBUG and os.path.isfile(prepopulate_file):
        compose.log_warning("[SKIP ] %s" % msg)
    else:
        scm_dict = compose.conf["gather_prepopulate"]
        if isinstance(scm_dict, dict):
            file_name = os.path.basename(scm_dict["file"])
            if scm_dict["scm"] == "file":
                scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"]))
        else:
            file_name = os.path.basename(scm_dict)
            scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict))

        compose.log_debug(msg)
        tmp_dir = tempfile.mkdtemp(prefix="prepopulate_file_")
        get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
        shutil.copy2(os.path.join(tmp_dir, file_name), prepopulate_file)
        shutil.rmtree(tmp_dir)


def get_prepopulate_packages(compose, arch, variant):
    result = set()

    prepopulate_file = os.path.join(compose.paths.work.topdir(arch="global"), "prepopulate.json")
    if not os.path.isfile(prepopulate_file):
        return result

    prepopulate_data = json.load(open(prepopulate_file, "r"))

    if variant:
        variants = [variant.uid]
    else:
        # ALL variants
        variants = prepopulate_data.keys()

    for var in variants:
        for build, packages in prepopulate_data.get(var, {}).get(arch, {}).iteritems():
            for i in packages:
                pkg_name, pkg_arch = split_name_arch(i)
                if pkg_arch not in get_compatible_arches(arch, multilib=True):
                    raise ValueError("Incompatible package arch '%s' for tree arch '%s'" % (pkg_arch, arch))
                result.add(i)
    return result
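
# Illustrative prepopulate.json layout, inferred from the lookups above
# (variant -> arch -> build NVR -> list of "name.arch" strings; all values
# are hypothetical):
#
#   {
#       "Server": {
#           "x86_64": {
#               "bash-4.2.45-1.fc21": ["bash.x86_64", "bash-doc.noarch"]
#           }
#       }
#   }
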

def get_additional_packages(compose, arch, variant):
    result = set()
    for i in get_arch_variant_data(compose.conf, "additional_packages", arch, variant):
        pkg_name, pkg_arch = split_name_arch(i)
        if pkg_arch is not None and pkg_arch not in get_compatible_arches(arch, multilib=True):
            raise ValueError("Incompatible package arch '%s' for tree arch '%s'" % (pkg_arch, arch))
        result.add((pkg_name, pkg_arch))
    return result
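
# Each resolved "additional_packages" entry is a plain package name or a
# "name.arch" pair, e.g. "bash" (any/best arch) or "glibc.i686"
# (illustrative names).
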

def get_filter_packages(compose, arch, variant):
    result = set()
    for i in get_arch_variant_data(compose.conf, "filter_packages", arch, variant):
        result.add(split_name_arch(i))
    return result


def get_multilib_whitelist(compose, arch):
    return set(get_arch_data(compose.conf, "multilib_whitelist", arch))


def get_multilib_blacklist(compose, arch):
    return set(get_arch_data(compose.conf, "multilib_blacklist", arch))


def get_lookaside_repos(compose, arch, variant):
    return get_arch_variant_data(compose.conf, "gather_lookaside_repos", arch, variant)


def get_variant_packages(compose, arch, variant, package_sets=None):
    GatherSource = get_gather_source(compose.conf["gather_source"])
    source = GatherSource(compose)
    packages, groups = source(arch, variant)
    # if compose.conf["gather_source"] == "comps":
    #     packages = set()
    filter_packages = set()

    # no variant -> no parent -> we have everything we need
    # doesn't make sense to do any package filtering
    if variant is None:
        return packages, groups, filter_packages

    packages |= get_additional_packages(compose, arch, variant)
    filter_packages |= get_filter_packages(compose, arch, variant)

    system_release_packages, system_release_filter_packages = get_system_release_packages(compose, arch, variant, package_sets)
    packages |= system_release_packages
    filter_packages |= system_release_filter_packages

    # if the variant is "optional", include all groups and packages
    # from the main "variant" and all "addons"
    if variant.type == "optional":
        for var in variant.parent.get_variants(arch=arch, types=["self", "variant", "addon", "layered-product"]):
            var_packages, var_groups, var_filter_packages = get_variant_packages(compose, arch, var, package_sets=package_sets)
            packages |= var_packages
            groups |= var_groups
            # we don't always want automatic inheritance of filtered packages from parent to child variants
            # filter_packages |= var_filter_packages

    if variant.type in ["addon", "layered-product"]:
        var_packages, var_groups, var_filter_packages = get_variant_packages(compose, arch, variant.parent, package_sets=package_sets)
        packages |= var_packages
        groups |= var_groups
        # filter_packages |= var_filter_packages

    return packages, groups, filter_packages


def get_system_release_packages(compose, arch, variant, package_sets):
    packages = set()
    filter_packages = set()

    if not variant:
        # include all system-release-* (gathering for a package superset)
        return packages, filter_packages

    if not package_sets or not package_sets.get(arch, None):
        return packages, filter_packages

    package_set = package_sets[arch]

    system_release_packages = set()
    for i in package_set:
        pkg = package_set[i]
        if pkg.is_system_release:
            system_release_packages.add(pkg)

    if not system_release_packages:
        return packages, filter_packages
    elif len(system_release_packages) == 1:
        # always include system-release package if available
        pkg = list(system_release_packages)[0]
        packages.add((pkg.name, None))
    else:
        if variant.type == "variant":
            # search for best match
            best_match = None
            for pkg in system_release_packages:
                if pkg.name.endswith("release-%s" % variant.uid.lower()) or pkg.name.startswith("%s-release" % variant.uid.lower()):
                    best_match = pkg
                    break
        else:
            # addons: return release packages from parent variant
            return get_system_release_packages(compose, arch, variant.parent, package_sets)

        if not best_match:
            # no package matches variant name -> pick the first one
            best_match = sorted(system_release_packages)[0]

        packages.add((best_match.name, None))
        for pkg in system_release_packages:
            if pkg.name == best_match.name:
                continue
            filter_packages.add((pkg.name, None))

    return packages, filter_packages
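
# Best-match example for the branch above (hypothetical package names):
# for a variant with uid "Server", both "fedora-release-server"
# (endswith "release-server") and "server-release-7.0"
# (startswith "server-release") would be picked as best_match.
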

102
pungi/phases/gather/link.py Normal file

@@ -0,0 +1,102 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import kobo.rpmlib
from pypungi.linker import LinkerThread, LinkerPool
# TODO: global Linker instance - to keep hardlinks on dest?
# DONE: show overall progress, not each file
# TODO: (these should be logged separately)


def _get_src_nevra(compose, pkg_obj, srpm_map):
    """Return source N-E:V-R.A.rpm; guess if necessary."""
    result = srpm_map.get(pkg_obj.sourcerpm, None)
    if not result:
        nvra = kobo.rpmlib.parse_nvra(pkg_obj.sourcerpm)
        nvra["epoch"] = pkg_obj.epoch
        result = kobo.rpmlib.make_nvra(nvra, add_rpm=True, force_epoch=True)
        compose.log_warning("Package %s has no SRPM available, guessing epoch: %s" % (pkg_obj.nevra, result))
    return result


def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={}):
    # srpm_map instance is shared between link_files() runs
    pkg_set = pkg_sets[arch]

    msg = "Linking packages (arch: %s, variant: %s)" % (arch, variant)
    compose.log_info("[BEGIN] %s" % msg)

    link_type = compose.conf.get("link_type", "hardlink-or-copy")
    pool = LinkerPool(link_type, logger=compose._logger)
    for i in range(10):
        pool.add(LinkerThread(pool))

    packages_dir = compose.paths.compose.packages("src", variant)
    packages_dir_relpath = compose.paths.compose.packages("src", variant, relative=True)
    for pkg in pkg_map["srpm"]:
        dst = os.path.join(packages_dir, os.path.basename(pkg["path"]))
        dst_relpath = os.path.join(packages_dir_relpath, os.path.basename(pkg["path"]))

        # link file
        pool.queue_put((pkg["path"], dst))

        # update rpm manifest
        pkg_obj = pkg_set[pkg["path"]]
        nevra = pkg_obj.nevra
        manifest.add("src", variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="source")

        # update srpm_map
        srpm_map.setdefault(pkg_obj.file_name, nevra)

    packages_dir = compose.paths.compose.packages(arch, variant)
    packages_dir_relpath = compose.paths.compose.packages(arch, variant, relative=True)
    for pkg in pkg_map["rpm"]:
        dst = os.path.join(packages_dir, os.path.basename(pkg["path"]))
        dst_relpath = os.path.join(packages_dir_relpath, os.path.basename(pkg["path"]))

        # link file
        pool.queue_put((pkg["path"], dst))

        # update rpm manifest
        pkg_obj = pkg_set[pkg["path"]]
        nevra = pkg_obj.nevra
        src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
        manifest.add(arch, variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="package", srpm_nevra=src_nevra)

    packages_dir = compose.paths.compose.debug_packages(arch, variant)
    packages_dir_relpath = compose.paths.compose.debug_packages(arch, variant, relative=True)
    for pkg in pkg_map["debuginfo"]:
        dst = os.path.join(packages_dir, os.path.basename(pkg["path"]))
        dst_relpath = os.path.join(packages_dir_relpath, os.path.basename(pkg["path"]))

        # link file
        pool.queue_put((pkg["path"], dst))

        # update rpm manifest
        pkg_obj = pkg_set[pkg["path"]]
        nevra = pkg_obj.nevra
        src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
        manifest.add(arch, variant.uid, nevra, path=dst_relpath, sigkey=pkg_obj.signature, rpm_type="debug", srpm_nevra=src_nevra)

    pool.start()
    pool.stop()
    compose.log_info("[DONE ] %s" % msg)


@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import kobo.plugins
from pypungi.checks import validate_options


class GatherMethodBase(kobo.plugins.Plugin):
    config_options = ()

    def __init__(self, compose):
        self.compose = compose

    def validate(self):
        errors = validate_options(self.compose.conf, self.config_options)
        if errors:
            raise ValueError("\n".join(errors))


class GatherMethodContainer(kobo.plugins.PluginContainer):
    @classmethod
    def normalize_name(cls, name):
        return name.lower()


@@ -0,0 +1,174 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import tempfile
from kobo.shortcuts import run
from pypungi.util import rmtree
from pypungi.wrappers.pungi import PungiWrapper
from pypungi.arch import tree_arch_to_yum_arch
import pypungi.phases.gather
import pypungi.phases.gather.method


class GatherMethodDeps(pypungi.phases.gather.method.GatherMethodBase):
    enabled = True

    config_options = (
        {
            "name": "gather_method",
            "expected_types": [str],
            "expected_values": ["deps"],
        },
        {
            "name": "check_deps",
            "expected_types": [bool],
        },
        {
            "name": "gather_fulltree",
            "expected_types": [bool],
            "optional": True,
        },
        {
            "name": "gather_selfhosting",
            "expected_types": [bool],
            "optional": True,
        },
    )
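
    # A minimal config sketch exercising the options above (values are
    # illustrative, not defaults):
    #
    #   gather_method = "deps"
    #   check_deps = False
    #   gather_fulltree = True
    #   gather_selfhosting = False
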
    def __call__(self, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, package_sets, path_prefix=None, fulltree_excludes=None, prepopulate=None):
        # result = {
        #     "rpm": [],
        #     "srpm": [],
        #     "debuginfo": [],
        # }

        write_pungi_config(self.compose, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, package_set=package_sets[arch], fulltree_excludes=fulltree_excludes, prepopulate=prepopulate)
        result = resolve_deps(self.compose, arch, variant)
        check_deps(self.compose, arch, variant)
        return result


def write_pungi_config(compose, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, repos=None, comps_repo=None, package_set=None, fulltree_excludes=None, prepopulate=None):
    """write pungi config (kickstart) for arch/variant"""
    pungi = PungiWrapper()
    pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)
    msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)

    if compose.DEBUG and os.path.isfile(pungi_cfg):
        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_info(msg)

    if not repos:
        repo_path = compose.paths.work.arch_repo(arch=arch)
        repos = {"pungi-repo": repo_path}

    lookaside_repos = {}
    for i, repo_url in enumerate(pypungi.phases.gather.get_lookaside_repos(compose, arch, variant)):
        lookaside_repos["lookaside-repo-%s" % i] = repo_url

    packages_str = []
    for pkg_name, pkg_arch in sorted(packages):
        if pkg_arch:
            packages_str.append("%s.%s" % (pkg_name, pkg_arch))
        else:
            packages_str.append(pkg_name)

    filter_packages_str = []
    for pkg_name, pkg_arch in sorted(filter_packages):
        if pkg_arch:
            filter_packages_str.append("%s.%s" % (pkg_name, pkg_arch))
        else:
            filter_packages_str.append(pkg_name)

    pungi.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=groups, packages=packages_str, exclude_packages=filter_packages_str, comps_repo=comps_repo, lookaside_repos=lookaside_repos, fulltree_excludes=fulltree_excludes, multilib_whitelist=multilib_whitelist, multilib_blacklist=multilib_blacklist, prepopulate=prepopulate)
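
# A rough sketch of the kickstart that write_pungi_config() asks
# PungiWrapper.write_kickstart() to produce (layout, paths and package
# names are illustrative; the exact output is defined by the wrapper):
#
#   repo --name=pungi-repo --baseurl=file:///.../work/x86_64/repo
#   %packages
#   @core
#   bash
#   -filtered-package
#   %end
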

def resolve_deps(compose, arch, variant):
    pungi = PungiWrapper()
    pungi_log = compose.paths.work.pungi_log(arch, variant)

    msg = "Running pungi (arch: %s, variant: %s)" % (arch, variant)
    if compose.DEBUG and os.path.exists(pungi_log):
        compose.log_warning("[SKIP ] %s" % msg)
        return pungi.get_packages(open(pungi_log, "r").read())

    compose.log_info("[BEGIN] %s" % msg)
    pungi_conf = compose.paths.work.pungi_conf(arch, variant)

    multilib_methods = compose.conf.get("multilib_methods", None)
    is_multilib = arch in compose.conf["multilib_arches"]
    if not is_multilib:
        multilib_methods = None

    greedy_method = compose.conf.get("greedy_method", "none")

    # variant
    fulltree = compose.conf.get("gather_fulltree", False)
    selfhosting = compose.conf.get("gather_selfhosting", False)

    # optional
    if variant.type == "optional":
        fulltree = True
        selfhosting = True

    # addon
    if variant.type in ["addon", "layered-product"]:
        # packages having SRPM in parent variant are excluded from fulltree (via %fulltree-excludes)
        fulltree = True
        selfhosting = False

    lookaside_repos = {}
    for i, repo_url in enumerate(pypungi.phases.gather.get_lookaside_repos(compose, arch, variant)):
        lookaside_repos["lookaside-repo-%s" % i] = repo_url

    yum_arch = tree_arch_to_yum_arch(arch)
    tmp_dir = compose.paths.work.tmp_dir(arch, variant)
    cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)
    cmd = pungi.get_pungi_cmd(pungi_conf, destdir=tmp_dir, name=variant.uid, selfhosting=selfhosting, fulltree=fulltree, arch=yum_arch, full_archlist=True, greedy=greedy_method, cache_dir=cache_dir, lookaside_repos=lookaside_repos, multilib_methods=multilib_methods)

    # Use a temp working directory as a workaround for
    # https://bugzilla.redhat.com/show_bug.cgi?id=795137
    tmp_dir = tempfile.mkdtemp(prefix="pungi_")
    try:
        run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir)
    finally:
        rmtree(tmp_dir)

    result = pungi.get_packages(open(pungi_log, "r").read())

    compose.log_info("[DONE ] %s" % msg)
    return result


def check_deps(compose, arch, variant):
    check_deps = compose.conf.get("check_deps", True)
    if not check_deps:
        return

    pungi = PungiWrapper()
    pungi_log = compose.paths.work.pungi_log(arch, variant)
    missing_deps = pungi.get_missing_deps(open(pungi_log, "r").read())
    if missing_deps:
        for pkg in sorted(missing_deps):
            compose.log_error("Unresolved dependencies in package %s: %s" % (pkg, sorted(missing_deps[pkg])))
        raise RuntimeError("Unresolved dependencies detected")


@@ -0,0 +1,94 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import pypungi.arch
from pypungi.util import pkg_is_rpm, pkg_is_srpm, pkg_is_debug
import pypungi.phases.gather.method


class GatherMethodNodeps(pypungi.phases.gather.method.GatherMethodBase):
    enabled = True

    config_options = (
        {
            "name": "gather_method",
            "expected_types": [str],
            "expected_values": ["nodeps"],
        },
    )

    def __call__(self, arch, variant, packages, groups, filter_packages, multilib_whitelist, multilib_blacklist, package_sets, path_prefix=None, fulltree_excludes=None, prepopulate=None):
        global_pkgset = package_sets["global"]
        result = {
            "rpm": [],
            "srpm": [],
            "debuginfo": [],
        }

        seen_rpms = {}
        seen_srpms = {}

        valid_arches = pypungi.arch.get_valid_arches(arch, multilib=True)
        compatible_arches = {}
        for i in valid_arches:
            compatible_arches[i] = pypungi.arch.get_compatible_arches(i)

        for i in global_pkgset:
            pkg = global_pkgset[i]
            if not pkg_is_rpm(pkg):
                continue
            for pkg_name, pkg_arch in packages:
                if pkg.arch not in valid_arches:
                    continue
                if pkg.name != pkg_name:
                    continue
                if pkg_arch is not None and pkg.arch != pkg_arch:
                    continue
                result["rpm"].append({
                    "path": pkg.file_path,
                    "flags": ["input"],
                })
                seen_rpms.setdefault(pkg.name, set()).add(pkg.arch)
                seen_srpms.setdefault(pkg.sourcerpm, set()).add(pkg.arch)

        for i in global_pkgset:
            pkg = global_pkgset[i]
            if not pkg_is_srpm(pkg):
                continue
            if pkg.file_name in seen_srpms:
                result["srpm"].append({
                    "path": pkg.file_path,
                    "flags": ["input"],
                })

        for i in global_pkgset:
            pkg = global_pkgset[i]
            if pkg.arch not in valid_arches:
                continue
            if not pkg_is_debug(pkg):
                continue
            if pkg.sourcerpm not in seen_srpms:
                continue
            if not set(compatible_arches[pkg.arch]) & set(seen_srpms[pkg.sourcerpm]):
                # this handles stuff like i386 debuginfo in an i686 package
                continue
            result["debuginfo"].append({
                "path": pkg.file_path,
                "flags": ["input"],
            })

        return result


@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import kobo.plugins
from pypungi.checks import validate_options


class GatherSourceBase(kobo.plugins.Plugin):
    config_options = ()

    def __init__(self, compose):
        self.compose = compose

    def validate(self):
        errors = validate_options(self.compose.conf, self.config_options)
        if errors:
            raise ValueError("\n".join(errors))


class GatherSourceContainer(kobo.plugins.PluginContainer):
    @classmethod
    def normalize_name(cls, name):
        return name.lower()


@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Get a package list based on comps.xml.
Input format:
see comps.dtd
Output:
set([(rpm_name, rpm_arch or None)])
"""
from pypungi.wrappers.comps import CompsWrapper
import pypungi.phases.gather.source


class GatherSourceComps(pypungi.phases.gather.source.GatherSourceBase):
    enabled = True

    config_options = (
        {
            "name": "gather_source",
            "expected_types": [str],
            "expected_values": ["comps"],
        },
        {
            "name": "comps_file",
            "expected_types": [str, dict],
        },
    )

    def __call__(self, arch, variant):
        groups = set()
        comps = CompsWrapper(self.compose.paths.work.comps(arch=arch))

        if variant is not None:
            # get packages for a particular variant
            comps.filter_groups(variant.groups)

        for i in comps.get_comps_groups():
            groups.add(i.groupid)
        return set(), groups


@@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Get a package list based on a JSON mapping.
Input format:
{
variant: {
tree_arch: {
rpm_name: [rpm_arch, rpm_arch, ... (or None for any/best arch)],
}
}
}
Output:
set([(rpm_name, rpm_arch or None)])
"""
import json
import pypungi.phases.gather.source


class GatherSourceJson(pypungi.phases.gather.source.GatherSourceBase):
    enabled = True

    config_options = (
        {
            "name": "gather_source",
            "expected_types": [str],
            "expected_values": ["json"],
        },
        {
            "name": "gather_source_mapping",
            "expected_types": [str],
        },
    )

    def __call__(self, arch, variant):
        json_path = self.compose.conf["gather_source_mapping"]
        data = open(json_path, "r").read()
        mapping = json.loads(data)

        packages = set()
        if variant is None:
            # get all packages for all variants
            for variant_uid in mapping:
                for pkg_name, pkg_arches in mapping[variant_uid][arch].iteritems():
                    for pkg_arch in pkg_arches:
                        packages.add((pkg_name, pkg_arch))
        else:
            # get packages for a particular variant
            for pkg_name, pkg_arches in mapping[variant.uid][arch].iteritems():
                for pkg_arch in pkg_arches:
                    packages.add((pkg_name, pkg_arch))
        return packages, set()


@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Get an empty package list.
Input:
none
Output:
set()
"""
import pypungi.phases.gather.source


class GatherSourceNone(pypungi.phases.gather.source.GatherSourceBase):
    enabled = True

    config_options = (
        {
            "name": "gather_source",
            "expected_types": [str],
            "expected_values": ["none"],
        },
    )

    def __call__(self, arch, variant):
        return set(), set()

267
pungi/phases/init.py Normal file

@@ -0,0 +1,267 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import tempfile
import shutil
from kobo.shortcuts import run
from pypungi.phases.base import PhaseBase
from pypungi.phases.gather import write_prepopulate_file
from pypungi.wrappers.createrepo import CreaterepoWrapper
from pypungi.wrappers.comps import CompsWrapper
from pypungi.wrappers.scm import get_file_from_scm


class InitPhase(PhaseBase):
    """INIT is a mandatory phase"""
    name = "init"

    config_options = (
        # PRODUCT INFO
        {
            "name": "product_name",
            "expected_types": [str],
        },
        {
            "name": "product_short",
            "expected_types": [str],
        },
        {
            "name": "product_version",
            "expected_types": [str],
        },
        {
            # override description in .discinfo; accepts %(variant_name)s and %(arch)s variables
            "name": "product_discinfo_description",
            "expected_types": [str],
            "optional": True,
        },
        {
            "name": "product_is_layered",
            "expected_types": [bool],
            "requires": (
                (lambda x: bool(x), ["base_product_name", "base_product_short", "base_product_version"]),
            ),
            "conflicts": (
                (lambda x: not bool(x), ["base_product_name", "base_product_short", "base_product_version"]),
            ),
        },

        # BASE PRODUCT INFO (FOR A LAYERED PRODUCT ONLY)
        {
            "name": "base_product_name",
            "expected_types": [str],
            "optional": True,
        },
        {
            "name": "base_product_short",
            "expected_types": [str],
            "optional": True,
        },
        {
            "name": "base_product_version",
            "expected_types": [str],
            "optional": True,
        },

        {
            "name": "comps_file",
            "expected_types": [str, dict],
            "optional": True,
        },
        {
            "name": "comps_filter_environments",  # !!! default is True !!!
            "expected_types": [bool],
            "optional": True,
        },
        {
            "name": "variants_file",
            "expected_types": [str, dict],
        },
        {
            "name": "sigkeys",
            "expected_types": [list],
        },
        {
            "name": "tree_arches",
            "expected_types": [list],
            "optional": True,
        },
        {
            "name": "tree_variants",
            "expected_types": [list],
            "optional": True,
        },
        {
            "name": "multilib_arches",
            "expected_types": [list],
            "optional": True,
        },

        # CREATEREPO SETTINGS
        {
            "name": "createrepo_c",
            "expected_types": [bool],
            "optional": True,
        },
        {
            "name": "createrepo_checksum",
            "expected_types": [str],
            "expected_values": ["sha256", "sha"],
            "optional": True,
        },

        # RUNROOT SETTINGS
        {
            "name": "runroot",
            "expected_types": [bool],
            "requires": (
                (lambda x: bool(x), ["runroot_tag", "runroot_channel"]),
            ),
            "conflicts": (
                (lambda x: not bool(x), ["runroot_tag", "runroot_channel"]),
            ),
        },
        {
            "name": "runroot_tag",
            "expected_types": [str],
            "optional": True,
        },
        {
            "name": "runroot_channel",
            "expected_types": [str],
            "optional": True,
        },
    )
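
    # A minimal config sketch covering the mandatory options above
    # (values are illustrative):
    #
    #   product_name = "Fedora"
    #   product_short = "Fedora"
    #   product_version = "21"
    #   product_is_layered = False
    #   variants_file = "variants-fedora.xml"
    #   sigkeys = [None]
    #   runroot = False
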
    def skip(self):
        # INIT must never be skipped,
        # because it generates data for LIVEIMAGES
        return False

    def run(self):
        # write global comps and arch comps
        write_global_comps(self.compose)
        for arch in self.compose.get_arches():
            write_arch_comps(self.compose, arch)

        # create comps repos
        for arch in self.compose.get_arches():
            create_comps_repo(self.compose, arch)

        # write variant comps
        for variant in self.compose.get_variants():
            for arch in variant.arches:
                write_variant_comps(self.compose, arch, variant)

        # download variants.xml / product.xml?

        # write prepopulate file
        write_prepopulate_file(self.compose)


def write_global_comps(compose):
    if not compose.has_comps:
        return

    comps_file_global = compose.paths.work.comps(arch="global")
    msg = "Writing global comps file: %s" % comps_file_global

    if compose.DEBUG and os.path.isfile(comps_file_global):
        compose.log_warning("[SKIP ] %s" % msg)
    else:
        scm_dict = compose.conf["comps_file"]
        if isinstance(scm_dict, dict):
            comps_name = os.path.basename(scm_dict["file"])
            if scm_dict["scm"] == "file":
                scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"])
        else:
            comps_name = os.path.basename(scm_dict)
            scm_dict = os.path.join(compose.config_dir, scm_dict)

        compose.log_debug(msg)
        tmp_dir = tempfile.mkdtemp(prefix="comps_")
        get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
        shutil.copy2(os.path.join(tmp_dir, comps_name), comps_file_global)
        shutil.rmtree(tmp_dir)


def write_arch_comps(compose, arch):
    if not compose.has_comps:
        return

    comps_file_arch = compose.paths.work.comps(arch=arch)
    msg = "Writing comps file for arch '%s': %s" % (arch, comps_file_arch)

    if compose.DEBUG and os.path.isfile(comps_file_arch):
        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_debug(msg)
    run(["comps_filter", "--arch=%s" % arch, "--no-cleanup", "--output=%s" % comps_file_arch, compose.paths.work.comps(arch="global")])


def write_variant_comps(compose, arch, variant):
    if not compose.has_comps:
        return

    comps_file = compose.paths.work.comps(arch=arch, variant=variant)
    msg = "Writing comps file (arch: %s, variant: %s): %s" % (arch, variant, comps_file)

    if compose.DEBUG and os.path.isfile(comps_file):
        # read display_order and groups for environments (needed for live images)
        comps = CompsWrapper(comps_file)
        # groups = variant.groups
        comps.filter_groups(variant.groups)
        if compose.conf.get("comps_filter_environments", True):
            comps.filter_environments(variant.environments)

        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_debug(msg)
    run(["comps_filter", "--arch=%s" % arch, "--keep-empty-group=conflicts", "--keep-empty-group=conflicts-%s" % variant.uid.lower(), "--output=%s" % comps_file, compose.paths.work.comps(arch="global")])

    comps = CompsWrapper(comps_file)
    comps.filter_groups(variant.groups)
    if compose.conf.get("comps_filter_environments", True):
        comps.filter_environments(variant.environments)
    comps.write_comps()


def create_comps_repo(compose, arch):
    if not compose.has_comps:
        return

    createrepo_c = compose.conf.get("createrepo_c", False)
    createrepo_checksum = compose.conf.get("createrepo_checksum", None)
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    comps_repo = compose.paths.work.comps_repo(arch=arch)
    comps_path = compose.paths.work.comps(arch=arch)
    msg = "Creating comps repo for arch '%s'" % arch
    if compose.DEBUG and os.path.isdir(os.path.join(comps_repo, "repodata")):
        compose.log_warning("[SKIP ] %s" % msg)
    else:
        compose.log_info("[BEGIN] %s" % msg)
        cmd = repo.get_createrepo_cmd(comps_repo, update=True, database=True, skip_stat=True, outputdir=comps_repo, groupfile=comps_path, checksum=createrepo_checksum)
        run(cmd, logfile=compose.paths.log.log_file("global", "arch_repo"), show_cmd=True)
        compose.log_info("[DONE ] %s" % msg)

224
pungi/phases/live_images.py Normal file

@@ -0,0 +1,224 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import sys
import copy
import time
import pipes
import shutil
import re
import tempfile
from kobo.threads import ThreadPool, WorkerThread
from kobo.shortcuts import run
from pypungi.wrappers.kojiwrapper import KojiWrapper
from pypungi.wrappers.iso import IsoWrapper
from pypungi.wrappers.scm import get_file_from_scm
from pypungi.phases.base import PhaseBase
from pypungi.util import get_arch_variant_data
# HACK: define cmp in python3
if sys.version_info[0] == 3:
    def cmp(a, b):
        return (a > b) - (a < b)


class LiveImagesPhase(PhaseBase):
    name = "liveimages"

    config_options = (
        {
            "name": "live_target",
            "expected_types": [str],
            "optional": True,
        },
    )

    def __init__(self, compose):
        PhaseBase.__init__(self, compose)
        self.pool = ThreadPool(logger=self.compose._logger)

    def skip(self):
        if PhaseBase.skip(self):
            return True
        if not self.compose.conf.get("live_images"):
            return True
        return False

    def run(self):
        symlink_isos_to = self.compose.conf.get("symlink_isos_to", None)
        iso = IsoWrapper()
        commands = []

        for variant in self.compose.variants.values():
            for arch in variant.arches + ["src"]:
                ks_in = get_ks_in(self.compose, arch, variant)
                if not ks_in:
                    continue
                ks_file = tweak_ks(self.compose, arch, variant, ks_in)

                iso_dir = self.compose.paths.compose.iso_dir(arch, variant, symlink_to=symlink_isos_to)
                if not iso_dir:
                    continue

                # XXX: hardcoded disc_type and disc_num
                iso_path = self.compose.paths.compose.iso_path(arch, variant, disc_type="live", disc_num=None, symlink_to=symlink_isos_to)
                if os.path.isfile(iso_path):
                    self.compose.log_warning("Skipping creating live image, it already exists: %s" % iso_path)
                    continue
                iso_name = os.path.basename(iso_path)

                cmd = {
                    "arch": arch,
                    "variant": variant,
                    "iso_path": iso_path,
                    "build_arch": arch,
                    "ks_file": ks_file,
                    "cmd": [],
                    "label": "",  # currently not used
                }

                repo = self.compose.paths.compose.repository(arch, variant)
                # HACK:
                repo = re.sub(r"^/mnt/koji/", "https://kojipkgs.fedoraproject.org/", repo)
                cmd["repos"] = [repo]

                # additional repos
                data = get_arch_variant_data(self.compose.conf, "live_images", arch, variant)
                cmd["repos"].extend(data[0].get("additional_repos", []))

                chdir_cmd = "cd %s" % pipes.quote(iso_dir)
                cmd["cmd"].append(chdir_cmd)

                # compute md5sum, sha1sum, sha256sum
                cmd["cmd"].extend(iso.get_checksum_cmds(iso_name))

                # create iso manifest
                cmd["cmd"].append(iso.get_manifest_cmd(iso_name))

                cmd["cmd"] = " && ".join(cmd["cmd"])
                commands.append(cmd)

        for cmd in commands:
            self.pool.add(CreateLiveImageThread(self.pool))
            self.pool.queue_put((self.compose, cmd))

        self.pool.start()

    def stop(self, *args, **kwargs):
        PhaseBase.stop(self, *args, **kwargs)
        if self.skip():
            return


class CreateLiveImageThread(WorkerThread):
    def fail(self, compose, cmd):
        compose.log_error("LiveImage failed, removing ISO: %s" % cmd["iso_path"])
        try:
            # remove (possibly?) incomplete ISO
            os.unlink(cmd["iso_path"])
            # TODO: remove checksums
        except OSError:
            pass

    def process(self, item, num):
        compose, cmd = item
        runroot = compose.conf.get("runroot", False)
        log_file = compose.paths.log.log_file(cmd["arch"], "createiso-%s" % os.path.basename(cmd["iso_path"]))
        msg = "Creating ISO (arch: %s, variant: %s): %s" % (cmd["arch"], cmd["variant"], os.path.basename(cmd["iso_path"]))
        self.pool.log_info("[BEGIN] %s" % msg)

        if runroot:
            # run in a koji build root
            koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
            name, version = compose.compose_id.rsplit("-", 1)
            target = compose.conf["live_target"]
            koji_cmd = koji_wrapper.get_create_image_cmd(name, version, target, cmd["build_arch"], cmd["ks_file"], cmd["repos"], image_type="live", wait=True, archive=False)

            # avoid race conditions?
            # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
            time.sleep(num * 3)

            output = koji_wrapper.run_create_image_cmd(koji_cmd, log_file=log_file)
            if output["retcode"] != 0:
                self.fail(compose, cmd)
                raise RuntimeError("LiveImage task failed: %s. See %s for more details." % (output["task_id"], log_file))

            # copy finished image to isos/
            image_path = koji_wrapper.get_image_path(output["task_id"])
            # TODO: assert len == 1
            image_path = image_path[0]
            shutil.copy2(image_path, cmd["iso_path"])

            # write checksum and manifest
            run(cmd["cmd"])
        else:
            raise RuntimeError("NOT IMPLEMENTED")

        self.pool.log_info("[DONE ] %s" % msg)


def get_ks_in(compose, arch, variant):
    data = get_arch_variant_data(compose.conf, "live_images", arch, variant)
    if not data:
        return

    scm_dict = data[0]["kickstart"]
    if isinstance(scm_dict, dict):
        if scm_dict["scm"] == "file":
            file_name = os.path.basename(scm_dict["file"])
            scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"]))
    else:
        file_name = os.path.basename(scm_dict)
        scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict))

    tmp_dir = tempfile.mkdtemp(prefix="ks_in_")
    get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
    ks_in = os.path.join(compose.paths.work.topdir(arch), "liveimage-%s.%s.ks.in" % (variant.uid, arch))
    shutil.copy2(os.path.join(tmp_dir, file_name), ks_in)
    shutil.rmtree(tmp_dir)
    return ks_in


def tweak_ks(compose, arch, variant, ks_in):
    if variant.environments:
        # get groups from default environment (with lowest display_order)
        envs = copy.deepcopy(variant.environments)
        envs.sort(lambda x, y: cmp(x["display_order"], y["display_order"]))
        env = envs[0]
        groups = sorted(env["groups"])
    else:
        # no environments -> get default groups
        groups = []
        for i in variant.groups:
            if i["default"]:
                groups.append(i["name"])
        groups.sort()

    ks_file = os.path.join(compose.paths.work.topdir(arch), "liveimage-%s.%s.ks" % (variant.uid, arch))
    contents = open(ks_in, "r").read()
    contents = contents.replace("__GROUPS__", "\n".join(["@%s" % i for i in groups]))
    open(ks_file, "w").write(contents)
    return ks_file
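
# Illustrative __GROUPS__ expansion performed above: a ks.in template containing
#
#   %packages
#   __GROUPS__
#   %end
#
# becomes, for groups ["core", "standard"] (hypothetical):
#
#   %packages
#   @core
#   @standard
#   %end
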


@@ -0,0 +1,102 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
from kobo.shortcuts import force_list
import pypungi.phases.pkgset.pkgsets
from pypungi.arch import get_valid_arches
from pypungi.phases.base import PhaseBase


class PkgsetPhase(PhaseBase):
    """PKGSET"""
    name = "pkgset"

    config_options = (
        {
            "name": "pkgset_source",
            "expected_types": [str],
        },
        {
            "name": "multilib_arches",
            "expected_types": [list],
        },
    )

    def run(self):
        pkgset_source = "PkgsetSource%s" % self.compose.conf["pkgset_source"]
        from source import PkgsetSourceContainer
        import sources
        PkgsetSourceContainer.register_module(sources)
        container = PkgsetSourceContainer()
        SourceClass = container[pkgset_source]
        self.package_sets, self.path_prefix = SourceClass(self.compose)()


# TODO: per arch?
def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
    result = {}
    for arch in compose.get_arches():
        compose.log_info("Populating package set for arch: %s" % arch)
        is_multilib = arch in compose.conf["multilib_arches"]
        arches = get_valid_arches(arch, is_multilib, add_src=True)
        pkgset = pypungi.phases.pkgset.pkgsets.PackageSetBase(compose.conf["sigkeys"], logger=compose._logger, arches=arches)
        pkgset.merge(global_pkgset, arch, arches)
        pkgset.save_file_list(compose.paths.work.package_list(arch=arch), remove_path_prefix=path_prefix)
        result[arch] = pkgset
    return result


def find_old_compose(old_compose_dirs, shortname=None, version=None):
    composes = []

    for compose_dir in force_list(old_compose_dirs):
        if not os.path.isdir(compose_dir):
            continue

        # get all finished composes
        for i in os.listdir(compose_dir):
            # TODO: read .composeinfo
            if shortname and not i.startswith(shortname):
                continue

            if shortname and version and not i.startswith("%s-%s" % (shortname, version)):
                continue

            path = os.path.join(compose_dir, i)
            if not os.path.isdir(path):
                continue

            if os.path.islink(path):
                continue

            status_path = os.path.join(path, "STATUS")
            if not os.path.isfile(status_path):
                continue

            try:
                if open(status_path, "r").read().strip() in ("FINISHED", "DOOMED"):
                    composes.append((i, os.path.abspath(path)))
            except:
                continue

    if not composes:
        return None

    return sorted(composes)[-1][1]
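
# Illustrative old-composes layout consumed above (names hypothetical);
# only composes whose STATUS file reads FINISHED or DOOMED are candidates,
# and the newest one by sort order wins:
#
#   /old-composes/Fedora-21-20150201.0/STATUS  -> "FINISHED"  (candidate)
#   /old-composes/Fedora-21-20150202.0/STATUS  -> "STARTED"   (skipped)
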


@@ -0,0 +1,136 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
from kobo.shortcuts import run, force_list, relative_path
import pypungi.phases.pkgset.pkgsets
from pypungi.arch import get_valid_arches
from pypungi.wrappers.createrepo import CreaterepoWrapper


# TODO: per arch?
def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
    result = {}
    for arch in compose.get_arches():
        compose.log_info("Populating package set for arch: %s" % arch)
        is_multilib = arch in compose.conf["multilib_arches"]
        arches = get_valid_arches(arch, is_multilib, add_src=True)
        pkgset = pypungi.phases.pkgset.pkgsets.PackageSetBase(compose.conf["sigkeys"], logger=compose._logger, arches=arches)
        pkgset.merge(global_pkgset, arch, arches)
        pkgset.save_file_list(compose.paths.work.package_list(arch=arch), remove_path_prefix=path_prefix)
        result[arch] = pkgset
    return result


def create_global_repo(compose, path_prefix):
    createrepo_c = compose.conf.get("createrepo_c", False)
    createrepo_checksum = compose.conf.get("createrepo_checksum", None)
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    repo_dir_global = compose.paths.work.arch_repo(arch="global")
    msg = "Running createrepo for the global package set"

    if compose.DEBUG and os.path.isdir(os.path.join(repo_dir_global, "repodata")):
        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_info("[BEGIN] %s" % msg)

    # find an old compose suitable for repodata reuse
    old_compose_path = None
    update_md_path = None
    if compose.old_composes:
        old_compose_path = find_old_compose(compose.old_composes, compose.conf["product_short"], compose.conf["product_version"], compose.conf.get("base_product_short", None), compose.conf.get("base_product_version", None))
        if old_compose_path is None:
            compose.log_info("No suitable old compose found in: %s" % compose.old_composes)
        else:
            repo_dir = compose.paths.work.arch_repo(arch="global")
            rel_path = relative_path(repo_dir, os.path.abspath(compose.topdir).rstrip("/") + "/")
            old_repo_dir = os.path.join(old_compose_path, rel_path)
            if os.path.isdir(old_repo_dir):
                compose.log_info("Using old repodata from: %s" % old_repo_dir)
                update_md_path = old_repo_dir

    # IMPORTANT: must not use --skip-stat here -- to make sure that correctly signed files are pulled in
    cmd = repo.get_createrepo_cmd(path_prefix, update=True, database=True, skip_stat=False, pkglist=compose.paths.work.package_list(arch="global"), outputdir=repo_dir_global, baseurl="file://%s" % path_prefix, workers=5, update_md_path=update_md_path, checksum=createrepo_checksum)
    run(cmd, logfile=compose.paths.log.log_file("global", "arch_repo"), show_cmd=True)
    compose.log_info("[DONE ] %s" % msg)


def create_arch_repos(compose, arch, path_prefix):
    createrepo_c = compose.conf.get("createrepo_c", False)
    createrepo_checksum = compose.conf.get("createrepo_checksum", None)
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    repo_dir_global = compose.paths.work.arch_repo(arch="global")
    repo_dir = compose.paths.work.arch_repo(arch=arch)
    msg = "Running createrepo for arch '%s'" % arch

    if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")):
        compose.log_warning("[SKIP ] %s" % msg)
        return

    compose.log_info("[BEGIN] %s" % msg)
    comps_path = None
    if compose.has_comps:
        comps_path = compose.paths.work.comps(arch=arch)
    cmd = repo.get_createrepo_cmd(path_prefix, update=True, database=True, skip_stat=True, pkglist=compose.paths.work.package_list(arch=arch), outputdir=repo_dir, baseurl="file://%s" % path_prefix, workers=5, groupfile=comps_path, update_md_path=repo_dir_global, checksum=createrepo_checksum)
    run(cmd, logfile=compose.paths.log.log_file(arch, "arch_repo"), show_cmd=True)
    compose.log_info("[DONE ] %s" % msg)


def find_old_compose(old_compose_dirs, product_short, product_version, base_product_short=None, base_product_version=None):
    composes = []

    for compose_dir in force_list(old_compose_dirs):
        if not os.path.isdir(compose_dir):
            continue

        # get all finished composes
        for i in os.listdir(compose_dir):
            # TODO: read .composeinfo
            pattern = "%s-%s" % (product_short, product_version)
            if base_product_short:
                pattern += "-%s" % base_product_short
            if base_product_version:
                pattern += "-%s" % base_product_version
            if not i.startswith(pattern):
                continue

            path = os.path.join(compose_dir, i)
            if not os.path.isdir(path):
                continue

            if os.path.islink(path):
                continue

            status_path = os.path.join(path, "STATUS")
            if not os.path.isfile(status_path):
                continue

            try:
                if open(status_path, "r").read().strip() in ("FINISHED", "DOOMED"):
                    composes.append((i, os.path.abspath(path)))
            except:
                continue

    if not composes:
        return None

    return sorted(composes)[-1][1]


@@ -0,0 +1,285 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
The KojiPackageSet object obtains the latest RPMs from a Koji tag.
It automatically finds a signed copies according to *sigkey_ordering*.
"""
import os
import kobo.log
import kobo.pkgset
import kobo.rpmlib
from kobo.threads import WorkerThread, ThreadPool
from pypungi.util import pkg_is_srpm
from pypungi.arch import get_valid_arches


class ReaderPool(ThreadPool):
    def __init__(self, package_set, logger=None):
        ThreadPool.__init__(self, logger)
        self.package_set = package_set


class ReaderThread(WorkerThread):
    def process(self, item, num):
        # rpm_info, build_info = item

        if (num % 100 == 0) or (num == self.pool.queue_total):
            self.pool.package_set.log_debug("Processed %s out of %s packages" % (num, self.pool.queue_total))

        rpm_path = self.pool.package_set.get_package_path(item)
        rpm_obj = self.pool.package_set.file_cache.add(rpm_path)
        self.pool.package_set.rpms_by_arch.setdefault(rpm_obj.arch, []).append(rpm_obj)

        if pkg_is_srpm(rpm_obj):
            self.pool.package_set.srpms_by_name[rpm_obj.file_name] = rpm_obj
        elif rpm_obj.arch == "noarch":
            srpm = self.pool.package_set.srpms_by_name.get(rpm_obj.sourcerpm, None)
            if srpm:
                # HACK: copy {EXCLUDE,EXCLUSIVE}ARCH from SRPM to noarch RPMs
                rpm_obj.excludearch = srpm.excludearch
                rpm_obj.exclusivearch = srpm.exclusivearch
            else:
                self.pool.log_warning("Can't find a SRPM for %s" % rpm_obj.file_name)


class PackageSetBase(kobo.log.LoggingBase):
    def __init__(self, sigkey_ordering, arches=None, logger=None):
        kobo.log.LoggingBase.__init__(self, logger=logger)
        self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
        self.sigkey_ordering = sigkey_ordering or [None]
        self.arches = arches
        self.rpms_by_arch = {}
        self.srpms_by_name = {}

    def __getitem__(self, name):
        return self.file_cache[name]

    def __len__(self):
        return len(self.file_cache)

    def __iter__(self):
        for i in self.file_cache:
            yield i

    def __getstate__(self):
        result = self.__dict__.copy()
        del result["_logger"]
        return result

    def __setstate__(self, data):
        self._logger = None
        self.__dict__.update(data)

    def read_packages(self, rpms, srpms):
        srpm_pool = ReaderPool(self, self._logger)
        rpm_pool = ReaderPool(self, self._logger)

        for i in rpms:
            rpm_pool.queue_put(i)

        for i in srpms:
            srpm_pool.queue_put(i)

        thread_count = 10
        for i in range(thread_count):
            srpm_pool.add(ReaderThread(srpm_pool))
            rpm_pool.add(ReaderThread(rpm_pool))

        # process SRC and NOSRC packages first (see ReaderThread for the EXCLUDEARCH/EXCLUSIVEARCH hack for noarch packages)
        self.log_debug("Package set: spawning %s worker threads (SRPMs)" % thread_count)
        srpm_pool.start()
        srpm_pool.stop()
        self.log_debug("Package set: worker threads stopped (SRPMs)")

        self.log_debug("Package set: spawning %s worker threads (RPMs)" % thread_count)
        rpm_pool.start()
        rpm_pool.stop()
        self.log_debug("Package set: worker threads stopped (RPMs)")

        return self.rpms_by_arch

    def merge(self, other, primary_arch, arch_list):
        msg = "Merging package sets for %s: %s" % (primary_arch, arch_list)
        self.log_debug("[BEGIN] %s" % msg)

        # if "src" is present, make sure "nosrc" is included too
        if "src" in arch_list and "nosrc" not in arch_list:
            arch_list.append("nosrc")

        # make sure sources are processed last
        for i in ("nosrc", "src"):
            if i in arch_list:
                arch_list.remove(i)
                arch_list.append(i)

        seen_sourcerpms = set()
        # {Exclude,Exclusive}Arch must match *tree* arch + compatible native arches (excluding multilib arches)
        exclusivearch_list = get_valid_arches(primary_arch, multilib=False, add_noarch=False, add_src=False)
        for arch in arch_list:
            self.rpms_by_arch.setdefault(arch, [])
            for i in other.rpms_by_arch.get(arch, []):
                if i.file_path in self.file_cache:
                    # TODO: test if it really works
                    continue
                if arch == "noarch":
                    if i.excludearch and set(i.excludearch) & set(exclusivearch_list):
                        self.log_debug("Excluding (EXCLUDEARCH: %s): %s" % (sorted(set(i.excludearch)), i.file_name))
                        continue
                    if i.exclusivearch and not (set(i.exclusivearch) & set(exclusivearch_list)):
                        self.log_debug("Excluding (EXCLUSIVEARCH: %s): %s" % (sorted(set(i.exclusivearch)), i.file_name))
                        continue

                if arch in ("nosrc", "src"):
                    # include only sources having binary packages
                    if i.name not in seen_sourcerpms:
                        continue
                else:
                    sourcerpm_name = kobo.rpmlib.parse_nvra(i.sourcerpm)["name"]
                    seen_sourcerpms.add(sourcerpm_name)

                self.file_cache.file_cache[i.file_path] = i
                self.rpms_by_arch[arch].append(i)

        self.log_debug("[DONE ] %s" % msg)

    def save_file_list(self, file_path, remove_path_prefix=None):
        f = open(file_path, "w")
        for arch in sorted(self.rpms_by_arch):
            for i in self.rpms_by_arch[arch]:
                rpm_path = i.file_path
                if remove_path_prefix and rpm_path.startswith(remove_path_prefix):
                    rpm_path = rpm_path[len(remove_path_prefix):]
                f.write("%s\n" % rpm_path)
        f.close()


class FilelistPackageSet(PackageSetBase):
    def get_package_path(self, queue_item):
        # TODO: sigkey checking
        rpm_path = os.path.abspath(queue_item)
        return rpm_path

    def populate(self, file_list):
        result_rpms = []
        result_srpms = []
        msg = "Getting RPMs from file list"
        self.log_info("[BEGIN] %s" % msg)
        for i in file_list:
            if i.endswith(".src.rpm") or i.endswith(".nosrc.rpm"):
                result_srpms.append(i)
            else:
                result_rpms.append(i)
        result = self.read_packages(result_rpms, result_srpms)
        self.log_info("[DONE ] %s" % msg)
        return result


class KojiPackageSet(PackageSetBase):
    def __init__(self, koji_proxy, sigkey_ordering, arches=None, logger=None):
        PackageSetBase.__init__(self, sigkey_ordering=sigkey_ordering, arches=arches, logger=logger)
        self.koji_proxy = koji_proxy
        self.koji_pathinfo = getattr(__import__(koji_proxy.__module__, {}, {}, []), "pathinfo")

    def __getstate__(self):
        result = self.__dict__.copy()
        result["koji_class"] = self.koji_proxy.__class__.__name__
        result["koji_module"] = self.koji_proxy.__class__.__module__
        result["koji_baseurl"] = self.koji_proxy.baseurl
        result["koji_opts"] = self.koji_proxy.opts
        del result["koji_proxy"]
        del result["koji_pathinfo"]
        del result["_logger"]
        return result

    def __setstate__(self, data):
        class_name = data.pop("koji_class")
        module_name = data.pop("koji_module")
        module = __import__(module_name, {}, {}, [class_name])
        cls = getattr(module, class_name)
        self.koji_proxy = cls(data.pop("koji_baseurl"), data.pop("koji_opts"))
        self._logger = None
        self.__dict__.update(data)

    def get_latest_rpms(self, tag, event, inherit=True):
        return self.koji_proxy.listTaggedRPMS(tag, event=event, inherit=inherit, latest=True)

    def get_package_path(self, queue_item):
        rpm_info, build_info = queue_item
        rpm_path = None
        found = False
        pathinfo = self.koji_pathinfo
        for sigkey in self.sigkey_ordering:
            if sigkey is None:
                # we're looking for *signed* copies here
                continue
            sigkey = sigkey.lower()
            rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey))
            if os.path.isfile(rpm_path):
                found = True
                break

        if not found:
            if None in self.sigkey_ordering:
                # use an unsigned copy (if allowed)
                rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
                if os.path.isfile(rpm_path):
                    found = True
            else:
                # or raise an exception
                raise RuntimeError("RPM not found for sigs: %s" % self.sigkey_ordering)

        if not found:
            raise RuntimeError("Package not found: %s" % rpm_info)
        return rpm_path

    def populate(self, tag, event=None, inherit=True):
        result_rpms = []
        result_srpms = []

        if type(event) is dict:
            event = event["id"]

        msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (tag, event, inherit)
        self.log_info("[BEGIN] %s" % msg)
        rpms, builds = self.get_latest_rpms(tag, event)

        builds_by_id = {}
        for build_info in builds:
            builds_by_id.setdefault(build_info["build_id"], build_info)

        skipped_arches = []
        for rpm_info in rpms:
            if self.arches and rpm_info["arch"] not in self.arches:
                if rpm_info["arch"] not in skipped_arches:
                    self.log_debug("Skipping packages for arch: %s" % rpm_info["arch"])
                    skipped_arches.append(rpm_info["arch"])
                continue

            build_info = builds_by_id[rpm_info["build_id"]]
            if rpm_info["arch"] in ("src", "nosrc"):
                result_srpms.append((rpm_info, build_info))
            else:
                result_rpms.append((rpm_info, build_info))

        result = self.read_packages(result_rpms, result_srpms)
        self.log_info("[DONE ] %s" % msg)
        return result
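
# Hypothetical usage sketch for the class above (URL, tag and key ID are
# illustrative):
#
#   koji_proxy = koji.ClientSession("https://koji.example.com/kojihub")
#   pkgset = KojiPackageSet(koji_proxy, sigkey_ordering=["81b46521", None])
#   pkgset.populate("f21", inherit=True)
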


@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import kobo.plugins
from pypungi.checks import validate_options
class PkgsetSourceBase(kobo.plugins.Plugin):
config_options = ()
def __init__(self, compose):
self.compose = compose
def validate(self):
errors = validate_options(self.compose.conf, self.config_options)
if errors:
raise ValueError("\n".join(errors))
class PkgsetSourceContainer(kobo.plugins.PluginContainer):
@classmethod
def normalize_name(cls, name):
return name.lower()
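For orientation: PkgsetSourceContainer follows the same kobo.plugins lookup pattern as GatherSourceContainer further down in this commit. A sketch of registering and resolving a source plugin; the module paths are assumptions inferred from the imports in the following files, and compose is assumed to be a Compose instance already in scope:

import pypungi.phases.pkgset.source
import pypungi.phases.pkgset.sources  # assumed package holding the source plugins

container_cls = pypungi.phases.pkgset.source.PkgsetSourceContainer
container_cls.register_module(pypungi.phases.pkgset.sources)
container = container_cls()
SourceClass = container["PkgsetSourceKoji"]  # normalize_name() makes the lookup case-insensitive
package_sets, path_prefix = SourceClass(compose)()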

View File

View File

@ -0,0 +1,171 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import cPickle as pickle
import json
import koji
import pypungi.phases.pkgset.pkgsets
from pypungi.arch import get_valid_arches
from pypungi.phases.pkgset.common import create_arch_repos, create_global_repo, populate_arch_pkgsets
import pypungi.phases.pkgset.source
class PkgsetSourceKoji(pypungi.phases.pkgset.source.PkgsetSourceBase):
enabled = True
config_options = (
{
"name": "pkgset_source",
"expected_types": [str],
"expected_values": "koji",
},
{
"name": "pkgset_koji_url",
"expected_types": [str],
},
{
"name": "pkgset_koji_tag",
"expected_types": [str],
},
{
"name": "pkgset_koji_inherit",
"expected_types": [bool],
"optional": True,
},
{
"name": "pkgset_koji_path_prefix",
"expected_types": [str],
},
)
def __call__(self):
compose = self.compose
koji_url = compose.conf["pkgset_koji_url"]
# koji_tag = compose.conf["pkgset_koji_tag"]
path_prefix = compose.conf["pkgset_koji_path_prefix"].rstrip("/") + "/" # must contain trailing '/'
koji_proxy = koji.ClientSession(koji_url)
package_sets = get_pkgset_from_koji(self.compose, koji_proxy, path_prefix)
return (package_sets, path_prefix)
'''
class PkgsetKojiPhase(PhaseBase):
"""PKGSET"""
name = "pkgset"
def __init__(self, compose):
PhaseBase.__init__(self, compose)
self.package_sets = None
self.path_prefix = None
def run(self):
path_prefix = self.compose.conf["koji_path_prefix"]
path_prefix = path_prefix.rstrip("/") + "/" # must contain trailing '/'
koji_url = self.compose.conf["koji_url"]
koji_proxy = koji.ClientSession(koji_url)
self.package_sets = get_pkgset_from_koji(self.compose, koji_proxy, path_prefix)
self.path_prefix = path_prefix
'''
def get_pkgset_from_koji(compose, koji_proxy, path_prefix):
event_info = get_koji_event_info(compose, koji_proxy)
tag_info = get_koji_tag_info(compose, koji_proxy)
pkgset_global = populate_global_pkgset(compose, koji_proxy, path_prefix, tag_info, event_info)
# get_extra_packages(compose, pkgset_global)
package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)
package_sets["global"] = pkgset_global
create_global_repo(compose, path_prefix)
for arch in compose.get_arches():
# TODO: threads? runroot?
create_arch_repos(compose, arch, path_prefix)
return package_sets
def populate_global_pkgset(compose, koji_proxy, path_prefix, compose_tag, event_id):
ALL_ARCHES = set(["src"])
for arch in compose.get_arches():
is_multilib = arch in compose.conf["multilib_arches"]
arches = get_valid_arches(arch, is_multilib)
ALL_ARCHES.update(arches)
compose_tag = compose.conf["pkgset_koji_tag"]
inherit = compose.conf.get("pkgset_koji_inherit", True)
msg = "Populating the global package set from tag '%s'" % compose_tag
global_pkgset_path = os.path.join(compose.paths.work.topdir(arch="global"), "pkgset_global.pickle")
if compose.DEBUG and os.path.isfile(global_pkgset_path):
compose.log_warning("[SKIP ] %s" % msg)
pkgset = pickle.load(open(global_pkgset_path, "r"))
else:
compose.log_info(msg)
pkgset = pypungi.phases.pkgset.pkgsets.KojiPackageSet(koji_proxy, compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES)
pkgset.populate(compose_tag, event_id, inherit=inherit)
f = open(global_pkgset_path, "w")
data = pickle.dumps(pkgset)
f.write(data)
f.close()
# write global package list
pkgset.save_file_list(compose.paths.work.package_list(arch="global"), remove_path_prefix=path_prefix)
return pkgset
def get_koji_event_info(compose, koji_proxy):
event_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-event")
if compose.koji_event:
koji_event = koji_proxy.getEvent(compose.koji_event)
compose.log_info("Setting koji event to a custom value: %s" % compose.koji_event)
json.dump(koji_event, open(event_file, "w"))
msg = "Getting koji event"
if compose.DEBUG and os.path.exists(event_file):
compose.log_warning("[SKIP ] %s" % msg)
result = json.load(open(event_file, "r"))
else:
compose.log_info(msg)
result = koji_proxy.getLastEvent()
json.dump(result, open(event_file, "w"))
compose.log_info("Koji event: %s" % result["id"])
return result
def get_koji_tag_info(compose, koji_proxy):
tag_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-tag")
msg = "Getting a koji tag info"
if compose.DEBUG and os.path.exists(tag_file):
compose.log_warning("[SKIP ] %s" % msg)
result = json.load(open(tag_file, "r"))
else:
compose.log_info(msg)
tag_name = compose.conf["pkgset_koji_tag"]
result = koji_proxy.getTag(tag_name)
if result is None:
raise ValueError("Unknown koji tag: %s" % tag_name)
result["name"] = tag_name
json.dump(result, open(tag_file, "w"))
compose.log_info("Koji compose tag: %(name)s (ID: %(id)s)" % result)
return result
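The event handling above is what makes a compose reproducible: once an event ID is recorded in the work dir, every koji query is pinned to that point in the tag's history. A sketch using stock koji client calls only; the hub URL and tag name are placeholders:

import koji

proxy = koji.ClientSession("https://koji.example.com/kojihub")  # hypothetical hub
event = proxy.getLastEvent()  # e.g. {"id": 1234567, "ts": ...}
rpms, builds = proxy.listTaggedRPMS("f23-compose", event=event["id"], inherit=True, latest=True)
# re-running later with the same event id yields the same package set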

View File

@ -0,0 +1,185 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import cPickle as pickle
from kobo.shortcuts import run
import pypungi.phases.pkgset.pkgsets
from pypungi.arch import get_valid_arches
from pypungi.util import makedirs
from pypungi.wrappers.pungi import PungiWrapper
from pypungi.phases.pkgset.common import create_global_repo, create_arch_repos, populate_arch_pkgsets
from pypungi.phases.gather import get_prepopulate_packages
from pypungi.linker import LinkerThread, LinkerPool
import pypungi.phases.pkgset.source
class PkgsetSourceRepos(pypungi.phases.pkgset.source.PkgsetSourceBase):
enabled = True
config_options = (
{
"name": "pkgset_source",
"expected_types": [str],
"expected_values": "repos",
},
{
"name": "pkgset_repos",
"expected_types": [dict],
},
)
def __call__(self):
package_sets, path_prefix = get_pkgset_from_repos(self.compose)
return (package_sets, path_prefix)
def get_pkgset_from_repos(compose):
# populate pkgset from yum repos
# TODO: noarch hack - secondary arches, use x86_64 noarch where possible
flist = []
link_type = compose.conf.get("link_type", "hardlink-or-copy")
pool = LinkerPool(link_type, logger=compose._logger)
for i in range(10):
pool.add(LinkerThread(pool))
seen_packages = set()
for arch in compose.get_arches():
# write a pungi config for remote repos and a local comps repo
repos = {}
for num, repo in enumerate(compose.conf["pkgset_repos"][arch]):
repo_path = repo
if "://" not in repo_path:
repo_path = os.path.join(compose.config_dir, repo)
repos["repo-%s" % num] = repo_path
comps_repo = None
if compose.has_comps:
repos["comps"] = compose.paths.work.comps_repo(arch=arch)
comps_repo = "comps"
write_pungi_config(compose, arch, None, repos=repos, comps_repo=comps_repo)
pungi = PungiWrapper()
pungi_conf = compose.paths.work.pungi_conf(arch=arch)
pungi_log = compose.paths.log.log_file(arch, "fooo")
pungi_dir = compose.paths.work.pungi_download_dir(arch)
cmd = pungi.get_pungi_cmd(pungi_conf, destdir=pungi_dir, name="FOO", selfhosting=True, fulltree=True, multilib_methods=["all"], nodownload=False, full_archlist=True, arch=arch, cache_dir=compose.paths.work.pungi_cache_dir(arch=arch))
cmd.append("--force")
# TODO: runroot
run(cmd, logfile=pungi_log, show_cmd=True, stdout=False)
path_prefix = os.path.join(compose.paths.work.topdir(arch="global"), "download") + "/"
makedirs(path_prefix)
for root, dirs, files in os.walk(pungi_dir):
for fn in files:
if not fn.endswith(".rpm"):
continue
if fn in seen_packages:
continue
seen_packages.add(fn)
src = os.path.join(root, fn)
dst = os.path.join(path_prefix, os.path.basename(src))
flist.append(dst)
pool.queue_put((src, dst))
msg = "Linking downloaded pkgset packages"
compose.log_info("[BEGIN] %s" % msg)
pool.start()
pool.stop()
compose.log_info("[DONE ] %s" % msg)
flist = sorted(set(flist))
pkgset_global = populate_global_pkgset(compose, flist, path_prefix)
# get_extra_packages(compose, pkgset_global)
package_sets = populate_arch_pkgsets(compose, path_prefix, pkgset_global)
create_global_repo(compose, path_prefix)
for arch in compose.get_arches():
# TODO: threads? runroot?
create_arch_repos(compose, arch, path_prefix)
package_sets["global"] = pkgset_global
return package_sets, path_prefix
def populate_global_pkgset(compose, file_list, path_prefix):
ALL_ARCHES = set(["src"])
for arch in compose.get_arches():
is_multilib = arch in compose.conf["multilib_arches"]
arches = get_valid_arches(arch, is_multilib)
ALL_ARCHES.update(arches)
msg = "Populating the global package set from a file list"
global_pkgset_path = os.path.join(compose.paths.work.topdir(arch="global"), "packages.pickle")
if compose.DEBUG and os.path.isfile(global_pkgset_path):
compose.log_warning("[SKIP ] %s" % msg)
pkgset = pickle.load(open(global_pkgset_path, "r"))
else:
compose.log_info(msg)
pkgset = pypungi.phases.pkgset.pkgsets.FilelistPackageSet(compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES)
pkgset.populate(file_list)
f = open(global_pkgset_path, "w")
data = pickle.dumps(pkgset)
f.write(data)
f.close()
# write global package list
pkgset.save_file_list(compose.paths.work.package_list(arch="global"), remove_path_prefix=path_prefix)
return pkgset
def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, package_set=None):
"""write pungi config (kickstart) for arch/variant"""
pungi = PungiWrapper()
pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)
msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)
if compose.DEBUG and os.path.isfile(pungi_cfg):
compose.log_warning("[SKIP ] %s" % msg)
return
compose.log_info(msg)
# TODO move to a function
gather_source = "GatherSource%s" % compose.conf["gather_source"]
from pypungi.phases.gather.source import GatherSourceContainer
import pypungi.phases.gather.sources
GatherSourceContainer.register_module(pypungi.phases.gather.sources)
container = GatherSourceContainer()
SourceClass = container[gather_source]
src = SourceClass(compose)
packages = []
pkgs, grps = src(arch, variant)
for pkg_name, pkg_arch in pkgs:
if pkg_arch is None:
packages.append(pkg_name)
else:
packages.append("%s.%s" % (pkg_name, pkg_arch))
# include *all* packages providing system-release
if "system-release" not in packages:
packages.append("system-release")
prepopulate = get_prepopulate_packages(compose, arch, None)
pungi.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=grps, packages=packages, exclude_packages=[], comps_repo=None, prepopulate=prepopulate)
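For reference, a sketch of the pkgset_repos mapping this source consumes; the URLs are placeholders. Plain paths (no "://") are resolved relative to the compose config directory:

compose.conf["pkgset_source"] = "repos"
compose.conf["pkgset_repos"] = {
    "x86_64": ["http://mirror.example.com/os/x86_64/", "local-extras/"],
    "i386": ["http://mirror.example.com/os/i386/"],
}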

279
pungi/phases/product_img.py Normal file
View File

@ -0,0 +1,279 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Expected product.img paths
==========================
RHEL 6
------
installclasses/$variant.py
locale/$lang/LC_MESSAGES/comps.mo
RHEL 7
------
run/install/product/installclasses/$variant.py
run/install/product/locale/$lang/LC_MESSAGES/comps.mo
Compatibility symlinks
----------------------
installclasses -> run/install/product/installclasses
locale -> run/install/product/locale
run/install/product/pyanaconda/installclasses -> ../installclasses
"""
import os
import fnmatch
import tempfile
import shutil
import pipes
from kobo.shortcuts import run
from pypungi.arch import split_name_arch
from pypungi.util import makedirs, pkg_is_rpm
from pypungi.phases.base import PhaseBase
from pypungi.wrappers.iso import IsoWrapper
from pypungi.wrappers.scm import get_file_from_scm, get_dir_from_scm
class ProductimgPhase(PhaseBase):
"""PRODUCTIMG"""
name = "productimg"
config_options = (
{
"name": "bootable",
"expected_types": [bool],
"expected_values": [True],
},
)
def __init__(self, compose, pkgset_phase):
PhaseBase.__init__(self, compose)
# pkgset_phase provides package_sets and path_prefix
self.pkgset_phase = pkgset_phase
def skip(self):
if PhaseBase.skip(self):
return True
if not self.compose.conf.get("bootable"):
msg = "Not a bootable product. Skipping creating product images."
self.compose.log_debug(msg)
return True
return False
def run(self):
# create PRODUCT.IMG
for variant in self.compose.get_variants():
if variant.type != "variant":
continue
create_product_img(self.compose, "global", variant)
# copy PRODUCT.IMG
for arch in self.compose.get_arches():
for variant in self.compose.get_variants(arch=arch):
if variant.type != "variant":
continue
image = self.compose.paths.work.product_img(variant)
os_tree = self.compose.paths.compose.os_tree(arch, variant)
target_dir = os.path.join(os_tree, "images")
target_path = os.path.join(target_dir, "product.img")
if not os.path.isfile(target_path):
makedirs(target_dir)
shutil.copy2(image, target_path)
for arch in self.compose.get_arches():
for variant in self.compose.get_variants(arch=arch):
if variant.type != "variant":
continue
rebuild_boot_iso(self.compose, arch, variant, self.pkgset_phase.package_sets)
def create_product_img(compose, arch, variant):
# product.img is noarch (at least on rhel6 and rhel7)
arch = "global"
msg = "Creating product.img (arch: %s, variant: %s)" % (arch, variant)
image = compose.paths.work.product_img(variant)
if os.path.exists(image):
compose.log_warning("[SKIP ] %s" % msg)
return
compose.log_info("[BEGIN] %s" % msg)
product_tmp = tempfile.mkdtemp(prefix="product_img_")
install_class = compose.conf["install_class"].copy()
install_class["file"] = install_class["file"] % {"variant_id": variant.id.lower()}
install_dir = os.path.join(product_tmp, "installclasses")
makedirs(install_dir)
get_file_from_scm(install_class, target_path=install_dir, logger=None)
po_files = compose.conf["po_files"]
po_tmp = tempfile.mkdtemp(prefix="pofiles_")
get_dir_from_scm(po_files, po_tmp, logger=compose._logger)
for po_file in os.listdir(po_tmp):
if not po_file.endswith(".po"):
continue
lang = po_file[:-3]
target_dir = os.path.join(product_tmp, "locale", lang, "LC_MESSAGES")
makedirs(target_dir)
run(["msgfmt", "--output-file", os.path.join(target_dir, "comps.mo"), os.path.join(po_tmp, po_file)])
shutil.rmtree(po_tmp)
mount_tmp = tempfile.mkdtemp(prefix="product_img_mount_")
cmds = [
# allocate image
"dd if=/dev/zero of=%s bs=1k count=5760" % pipes.quote(image),
# create file system
"mke2fs -F %s" % pipes.quote(image),
"mount -o loop %s %s" % (pipes.quote(image), pipes.quote(mount_tmp)),
"mkdir -p %s/run/install/product" % pipes.quote(mount_tmp),
"cp -rp %s/* %s/run/install/product/" % (pipes.quote(product_tmp), pipes.quote(mount_tmp)),
"mkdir -p %s/run/install/product/pyanaconda" % pipes.quote(mount_tmp),
# compat symlink: installclasses -> run/install/product/installclasses
"ln -s run/install/product/installclasses %s" % pipes.quote(mount_tmp),
# compat symlink: locale -> run/install/product/locale
"ln -s run/install/product/locale %s" % pipes.quote(mount_tmp),
# compat symlink: run/install/product/pyanaconda/installclasses -> ../installclasses
"ln -s ../installclasses %s/run/install/product/pyanaconda/installclasses" % pipes.quote(mount_tmp),
"umount %s" % pipes.quote(mount_tmp),
# tweak last mount path written in the image
"tune2fs -M /run/install/product %s" % pipes.quote(image),
]
run(" && ".join(cmds))
shutil.rmtree(mount_tmp)
shutil.rmtree(product_tmp)
compose.log_info("[DONE ] %s" % msg)
def rebuild_boot_iso(compose, arch, variant, package_sets):
os_tree = compose.paths.compose.os_tree(arch, variant)
buildinstall_dir = compose.paths.work.buildinstall_dir(arch)
boot_iso = os.path.join(os_tree, "images", "boot.iso")
product_img = compose.paths.work.product_img(variant)
buildinstall_boot_iso = os.path.join(buildinstall_dir, "images", "boot.iso")
buildinstall_method = compose.conf["buildinstall_method"]
log_file = compose.paths.log.log_file(arch, "rebuild_boot_iso-%s.%s" % (variant, arch))
msg = "Rebuilding boot.iso (arch: %s, variant: %s)" % (arch, variant)
if not os.path.isfile(boot_iso):
# nothing to do
compose.log_warning("[SKIP ] %s" % msg)
return
compose.log_info("[BEGIN] %s" % msg)
iso = IsoWrapper()
# read the original volume id
volume_id = iso.get_volume_id(boot_iso)
# remove the original boot.iso (created during buildinstall) from the os dir
os.remove(boot_iso)
tmp_dir = tempfile.mkdtemp(prefix="boot_iso_")
mount_dir = tempfile.mkdtemp(prefix="boot_iso_mount_")
cmd = "mount -o loop %s %s" % (pipes.quote(buildinstall_boot_iso), pipes.quote(mount_dir))
run(cmd, logfile=log_file, show_cmd=True)
images_dir = os.path.join(tmp_dir, "images")
os.makedirs(images_dir)
shutil.copy2(product_img, os.path.join(images_dir, "product.img"))
if os.path.isfile(os.path.join(mount_dir, "isolinux", "isolinux.bin")):
os.makedirs(os.path.join(tmp_dir, "isolinux"))
shutil.copy2(os.path.join(mount_dir, "isolinux", "isolinux.bin"), os.path.join(tmp_dir, "isolinux"))
graft_points = iso.get_graft_points([mount_dir, tmp_dir])
graft_points_path = os.path.join(compose.paths.work.topdir(arch=arch), "boot-%s.%s.iso-graft-points" % (variant, arch))
iso.write_graft_points(graft_points_path, graft_points, exclude=["*/TRANS.TBL", "*/boot.cat"])
mkisofs_kwargs = {}
boot_files = None
if buildinstall_method == "lorax":
# TODO: $arch instead of ppc
mkisofs_kwargs["boot_args"] = iso.get_boot_options(arch, "/usr/share/lorax/config_files/ppc")
elif buildinstall_method == "buildinstall":
boot_files = explode_anaconda(compose, arch, variant, package_sets)
mkisofs_kwargs["boot_args"] = iso.get_boot_options(arch, boot_files)
# ppc(64) doesn't seem to support utf-8
if arch in ("ppc", "ppc64"):
mkisofs_kwargs["input_charset"] = None
mkisofs_cmd = iso.get_mkisofs_cmd(boot_iso, None, volid=volume_id, exclude=["./lost+found"], graft_points=graft_points_path, **mkisofs_kwargs)
run(mkisofs_cmd, logfile=log_file, show_cmd=True)
cmd = "umount %s" % pipes.quote(mount_dir)
run(cmd, logfile=log_file, show_cmd=True)
if arch == "x86_64":
isohybrid_cmd = "isohybrid --uefi %s" % pipes.quote(boot_iso)
run(isohybrid_cmd, logfile=log_file, show_cmd=True)
elif arch == "i386":
isohybrid_cmd = "isohybrid %s" % pipes.quote(boot_iso)
run(isohybrid_cmd, logfile=log_file, show_cmd=True)
# implant MD5SUM to iso
isomd5sum_cmd = iso.get_implantisomd5_cmd(boot_iso, compose.supported)
isomd5sum_cmd = " ".join([pipes.quote(i) for i in isomd5sum_cmd])
run(isomd5sum_cmd, logfile=log_file, show_cmd=True)
if boot_files:
shutil.rmtree(boot_files)
shutil.rmtree(tmp_dir)
shutil.rmtree(mount_dir)
# .treeinfo is written after productimg phase
# -> checksums should match
# -> no need to write/modify it here
compose.log_info("[DONE ] %s" % msg)
def explode_anaconda(compose, arch, variant, package_sets):
tmp_dir = tempfile.mkdtemp(prefix="anaconda_")
scm_dict = {
"scm": "rpm",
"repo": "anaconda.%s" % arch,
"file": [
"/usr/lib/anaconda-runtime/boot/*",
]
}
# if scm is "rpm" and repo contains a package name, find the package(s) in package set
if scm_dict["scm"] == "rpm" and not (scm_dict["repo"].startswith("/") or "://" in scm_dict["repo"]):
rpms = []
for pkgset_file in package_sets[arch]:
pkg_obj = package_sets[arch][pkgset_file]
if not pkg_is_rpm(pkg_obj):
continue
pkg_name, pkg_arch = split_name_arch(scm_dict["repo"])
if fnmatch.fnmatch(pkg_obj.name, pkg_name) and (pkg_arch is None or pkg_arch == pkg_obj.arch):
compose.log_critical("%s %s %s" % (pkg_obj.name, pkg_name, pkg_arch))
rpms.append(pkg_obj.file_path)
scm_dict["repo"] = rpms
if not rpms:
return None
get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
return tmp_dir
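A sketch of the configuration create_product_img() reads; the repo URLs are placeholders and the SCM-dict key for a directory fetch ("dir") is an assumption, since only the "file" form appears in this file. The "file" value is a template filled with the lowercased variant ID:

compose.conf["install_class"] = {
    "scm": "git",
    "repo": "git://git.example.com/installclasses.git",  # hypothetical repo
    "file": "%(variant_id)s.py",  # e.g. server.py for the Server variant
}
compose.conf["po_files"] = {
    "scm": "git",
    "repo": "git://git.example.com/comps-po.git",  # hypothetical repo
    "dir": "po/",  # assumed key for get_dir_from_scm()
}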

120
pungi/phases/test.py Normal file
View File

@ -0,0 +1,120 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import tempfile
from kobo.shortcuts import run
from pypungi.wrappers.repoclosure import RepoclosureWrapper
from pypungi.arch import get_valid_arches
from pypungi.phases.base import PhaseBase
from pypungi.phases.gather import get_lookaside_repos
from pypungi.util import rmtree
class TestPhase(PhaseBase):
name = "test"
def run(self):
run_repoclosure(self.compose)
def run_repoclosure(compose):
repoclosure = RepoclosureWrapper()
# TODO: Special handling for src packages (use repoclosure param builddeps)
msg = "Running repoclosure"
compose.log_info("[BEGIN] %s" % msg)
# Arch repos
for arch in compose.get_arches():
is_multilib = arch in compose.conf["multilib_arches"]
arches = get_valid_arches(arch, is_multilib)
repo_id = "repoclosure-%s" % arch
repo_dir = compose.paths.work.arch_repo(arch=arch)
lookaside = {}
if compose.conf.get("product_is_layered", False):
for i, lookaside_url in enumerate(get_lookaside_repos(compose, arch, None)):
lookaside["lookaside-%s-%s" % (arch, i)] = lookaside_url
cmd = repoclosure.get_repoclosure_cmd(repos={repo_id: repo_dir}, lookaside=lookaside, arch=arches)
# Use a temp working directory as a workaround for
# https://bugzilla.redhat.com/show_bug.cgi?id=795137
tmp_dir = tempfile.mkdtemp(prefix="repoclosure_")
try:
run(cmd, logfile=compose.paths.log.log_file(arch, "repoclosure"), show_cmd=True, can_fail=True, workdir=tmp_dir)
finally:
rmtree(tmp_dir)
# Variant repos
all_repos = {} # to be used as lookaside for the self-hosting check
all_arches = set()
for arch in compose.get_arches():
is_multilib = arch in compose.conf["multilib_arches"]
arches = get_valid_arches(arch, is_multilib)
all_arches.update(arches)
for variant in compose.get_variants(arch=arch):
lookaside = {}
if variant.parent:
repo_id = "repoclosure-%s.%s" % (variant.parent.uid, arch)
repo_dir = compose.paths.compose.repository(arch=arch, variant=variant.parent)
lookaside[repo_id] = repo_dir
repos = {}
repo_id = "repoclosure-%s.%s" % (variant.uid, arch)
repo_dir = compose.paths.compose.repository(arch=arch, variant=variant)
repos[repo_id] = repo_dir
if compose.conf.get("product_is_layered", False):
for i, lookaside_url in enumerate(get_lookaside_repos(compose, arch, variant)):
lookaside["lookaside-%s.%s-%s" % (variant.uid, arch, i)] = lookaside_url
cmd = repoclosure.get_repoclosure_cmd(repos=repos, lookaside=lookaside, arch=arches)
# Use a temp working directory as a workaround for
# https://bugzilla.redhat.com/show_bug.cgi?id=795137
tmp_dir = tempfile.mkdtemp(prefix="repoclosure_")
try:
run(cmd, logfile=compose.paths.log.log_file(arch, "repoclosure-%s" % variant), show_cmd=True, can_fail=True, workdir=tmp_dir)
finally:
rmtree(tmp_dir)
all_repos.update(repos)
all_repos.update(lookaside)
repo_id = "repoclosure-%s.%s" % (variant.uid, "src")
repo_dir = compose.paths.compose.repository(arch="src", variant=variant)
all_repos[repo_id] = repo_dir
# An SRPM can be built on any arch and is always rebuilt before building on the target arch.
# This means the deps can't always be satisfied within a single tree arch.
# As a workaround, let's run the self-hosting check across all repos.
# XXX: This doesn't cover the situation where a noarch package is excluded due to ExcludeArch/ExclusiveArch but is still required on that arch.
# In this case, it's an obvious bug in the test.
# check BuildRequires (self-hosting)
cmd = repoclosure.get_repoclosure_cmd(repos=all_repos, arch=all_arches, builddeps=True)
# Use a temp working directory as a workaround for
# https://bugzilla.redhat.com/show_bug.cgi?id=795137
tmp_dir = tempfile.mkdtemp(prefix="repoclosure_")
try:
run(cmd, logfile=compose.paths.log.log_file("global", "repoclosure-builddeps"), show_cmd=True, can_fail=True, workdir=tmp_dir)
finally:
rmtree(tmp_dir)
compose.log_info("[DONE ] %s" % msg)

View File

@ -1,4 +1,6 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
@ -12,11 +14,19 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import subprocess
import os
import shutil
import sys
import hashlib
import errno
import pipes
import re
from kobo.shortcuts import run
from productmd import get_major_version
def _doRunCommand(command, logger, rundir='/tmp', output=subprocess.PIPE, error=subprocess.PIPE, env=None):
"""Run a command and log the output. Error out if we get something on stderr"""
@ -121,3 +131,179 @@ def _doCheckSum(path, hash, logger):
myfile.close()
return '%s:%s' % (hash, sum.hexdigest())
def makedirs(path, mode=0o775):
mask = os.umask(0)
try:
os.makedirs(path, mode=mode)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
os.umask(mask)
def rmtree(path, ignore_errors=False, onerror=None):
"""shutil.rmtree ENOENT (ignoring no such file or directory) errors"""
try:
shutil.rmtree(path, ignore_errors, onerror)
except OSError as ex:
if ex.errno != errno.ENOENT:
raise
def explode_rpm_package(pkg_path, target_dir):
"""Explode a rpm package into target_dir."""
pkg_path = os.path.abspath(pkg_path)
makedirs(target_dir)
run("rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % pipes.quote(pkg_path), workdir=target_dir)
def pkg_is_rpm(pkg_obj):
if pkg_is_srpm(pkg_obj):
return False
if pkg_is_debug(pkg_obj):
return False
return True
def pkg_is_srpm(pkg_obj):
if isinstance(pkg_obj, str):
# string, probably N.A, N-V-R.A, N-V-R.A.rpm
for i in (".src", ".nosrc", ".src.rpm", ".nosrc.rpm"):
if pkg_obj.endswith(i):
return True
else:
# package object
if pkg_obj.arch in ("src", "nosrc"):
return True
return False
def pkg_is_debug(pkg_obj):
if pkg_is_srpm(pkg_obj):
return False
if isinstance(pkg_obj, str):
# string
if "-debuginfo" in pkg_obj:
return True
else:
# package object
if "-debuginfo" in pkg_obj.name:
return True
return False
# format: [(variant_uid_regex, {arch|*: [data]})]
def get_arch_variant_data(conf, var_name, arch, variant):
result = []
for conf_variant, conf_data in conf.get(var_name, []):
if variant is not None and not re.match(conf_variant, variant.uid):
continue
for conf_arch in conf_data:
if conf_arch != "*" and conf_arch != arch:
continue
if conf_arch == "*" and arch == "src":
# src is excluded from '*' and needs to be explicitly added to the mapping
continue
if isinstance(conf_data[conf_arch], list):
result.extend(conf_data[conf_arch])
else:
result.append(conf_data[conf_arch])
return result
# format: {arch|*: [data]}
def get_arch_data(conf, var_name, arch):
result = []
for conf_arch, conf_data in conf.get(var_name, {}).items():
if conf_arch != "*" and conf_arch != arch:
continue
if conf_arch == "*" and arch == "src":
# src is excluded from '*' and needs to be explicitly added to the mapping
continue
if isinstance(conf_data, list):
result.extend(conf_data)
else:
result.append(conf_data)
return result
def get_buildroot_rpms(compose, task_id):
"""Get build root RPMs - either from runroot or local"""
result = []
if task_id:
# runroot
import koji
koji_url = compose.conf["pkgset_koji_url"]
koji_proxy = koji.ClientSession(koji_url)
buildroot_infos = koji_proxy.listBuildroots(taskID=task_id)
buildroot_info = buildroot_infos[-1]
data = koji_proxy.listRPMs(componentBuildrootID=buildroot_info["id"])
for rpm_info in data:
fmt = "%(nvr)s.%(arch)s"
result.append(fmt % rpm_info)
else:
# local
retcode, output = run("rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'")
for i in output.splitlines():
if not i:
continue
result.append(i)
result.sort()
return result
def get_volid(compose, arch, variant=None, escape_spaces=False):
"""Get ISO volume ID for arch and variant"""
if variant and variant.type == "addon":
# addons are part of parent variant media
return None
if variant and variant.type == "layered-product":
product_short = variant.product_short
product_version = variant.product_version
product_is_layered = True
base_product_short = compose.conf["product_short"]
base_product_version = get_major_version(compose.conf["product_version"])
variant_uid = variant.parent.uid
else:
product_short = compose.conf["product_short"]
product_version = compose.conf["product_version"]
product_is_layered = compose.conf["product_is_layered"]
base_product_short = compose.conf.get("base_product_short", "")
base_product_version = compose.conf.get("base_product_version", "")
variant_uid = variant and variant.uid or None
products = [
"%(product_short)s-%(product_version)s %(variant_uid)s.%(arch)s",
"%(product_short)s-%(product_version)s %(arch)s",
]
layered_products = [
"%(product_short)s-%(product_version)s %(base_product_short)s-%(base_product_version)s %(variant_uid)s.%(arch)s",
"%(product_short)s-%(product_version)s %(base_product_short)s-%(base_product_version)s %(arch)s",
]
volid = None
if product_is_layered:
all_products = layered_products + products
else:
all_products = products
for i in all_products:
if not variant_uid and "%(variant_uid)s" in i:
continue
volid = i % locals()
if len(volid) <= 32:
break
# from wrappers.iso import IsoWrapper
# iso = IsoWrapper(logger=compose._logger)
# volid = iso._truncate_volid(volid)
if len(volid) > 32:
raise ValueError("Could not create volume ID <= 32 characters")
if escape_spaces:
volid = volid.replace(" ", r"\x20")
return volid
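A worked example of the mapping format described in the comment above get_arch_variant_data(); the option name, the values and server_variant (with uid == "Server") are illustrative only:

conf = {
    "extra_files": [
        ("^Server$", {
            "x86_64": [{"scm": "git", "repo": "git://example.com/extras.git", "file": "GPL"}],
            "*": ["README"],        # matches every arch except src
            "src": ["README.src"],  # src must be listed explicitly
        }),
    ],
}
get_arch_variant_data(conf, "extra_files", "x86_64", server_variant)
# -> contains both the scm dict and "README"
get_arch_variant_data(conf, "extra_files", "src", server_variant)
# -> ["README.src"]; the "*" entry is skipped for src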

View File

427
pungi/wrappers/comps.py Normal file
View File

@ -0,0 +1,427 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import sys
import fnmatch
import xml.dom.minidom
import yum.comps
if sys.version_info[:2] < (2, 7):
# HACK: remove spaces from text elements on py < 2.7
OldElement = xml.dom.minidom.Element
class Element(OldElement):
def writexml(self, writer, indent="", addindent="", newl=""):
if len(self.childNodes) == 1 and self.firstChild.nodeType == 3:  # 3 == Node.TEXT_NODE
writer.write(indent)
OldElement.writexml(self, writer)
writer.write(newl)
else:
OldElement.writexml(self, writer, indent, addindent, newl)
xml.dom.minidom.Element = Element
class CompsWrapper(object):
"""Class for reading and retreiving information from comps XML files"""
def __init__(self, comps_file):
self.comps = yum.comps.Comps()
self.comps.add(comps_file)
self.comps_file = comps_file
def get_comps_packages(self):
"""Returns a dictionary containing all packages in comps"""
packages = set()
for group in self.comps.get_groups():
packages.update(group.packages)
return list(packages)
def get_comps_groups(self):
return self.comps.get_groups()
def write_comps(self, comps_obj=None, target_file=None):
if not comps_obj:
comps_obj = self.generate_comps()
if not target_file:
target_file = self.comps_file
stream = open(target_file, "w")
# comps_obj.writexml(stream, addindent=" ", newl="\n") # no encoding -> use toprettyxml()
stream.write(comps_obj.toprettyxml(indent=" ", encoding="UTF-8"))
stream.close()
def generate_comps(self):
impl = xml.dom.minidom.getDOMImplementation()
doctype = impl.createDocumentType("comps", "-//Red Hat, Inc.//DTD Comps info//EN", "comps.dtd")
doc = impl.createDocument(None, "comps", doctype)
msg_elem = doc.documentElement
groups = {}
for group_obj in self.comps.get_groups():
groupid = group_obj.groupid
groups[groupid] = {"group_obj": group_obj}
group_names = groups.keys()
group_names.sort()
for group_key in group_names:
group = groups[group_key]["group_obj"]
group_node = doc.createElement("group")
msg_elem.appendChild(group_node)
id_node = doc.createElement("id")
id_node.appendChild(doc.createTextNode(group.groupid))
group_node.appendChild(id_node)
name_node = doc.createElement("name")
name_node.appendChild(doc.createTextNode(group.name))
group_node.appendChild(name_node)
langs = group.translated_name.keys()
langs.sort()
for lang in langs:
text = group.translated_name[lang].decode("UTF-8")
node = doc.createElement("name")
node.setAttribute("xml:lang", lang)
node.appendChild(doc.createTextNode(text))
group_node.appendChild(node)
node = doc.createElement("description")
group_node.appendChild(node)
if group.description and group.description != "":
node.appendChild(doc.createTextNode(group.description))
langs = group.translated_description.keys()
langs.sort()
for lang in langs:
text = group.translated_description[lang].decode("UTF-8")
node = doc.createElement("description")
node.setAttribute("xml:lang", lang)
node.appendChild(doc.createTextNode(text))
group_node.appendChild(node)
node = doc.createElement("default")
if group.default:
node.appendChild(doc.createTextNode("true"))
else:
node.appendChild(doc.createTextNode("false"))
group_node.appendChild(node)
node = doc.createElement("uservisible")
if group.user_visible:
node.appendChild(doc.createTextNode("true"))
else:
node.appendChild(doc.createTextNode("false"))
group_node.appendChild(node)
if group.langonly:
node = doc.createElement("langonly")
node.appendChild(doc.createTextNode(group.langonly))
group_node.appendChild(node)
packagelist = doc.createElement("packagelist")
for package_type in ("mandatory", "default", "optional", "conditional"):
packages = getattr(group, package_type + "_packages").keys()
packages.sort()
for package in packages:
node = doc.createElement("packagereq")
node.appendChild(doc.createTextNode(package))
node.setAttribute("type", package_type)
packagelist.appendChild(node)
if package_type == "conditional":
node.setAttribute("requires", group.conditional_packages[package])
group_node.appendChild(packagelist)
categories = self.comps.get_categories()
for category in categories:
groups = set(category.groups) & set([i.groupid for i in self.comps.get_groups()])
if not groups:
continue
cat_node = doc.createElement("category")
msg_elem.appendChild(cat_node)
id_node = doc.createElement("id")
id_node.appendChild(doc.createTextNode(category.categoryid))
cat_node.appendChild(id_node)
name_node = doc.createElement("name")
name_node.appendChild(doc.createTextNode(category.name))
cat_node.appendChild(name_node)
langs = category.translated_name.keys()
langs.sort()
for lang in langs:
text = category.translated_name[lang].decode("UTF-8")
node = doc.createElement("name")
node.setAttribute("xml:lang", lang)
node.appendChild(doc.createTextNode(text))
cat_node.appendChild(node)
if category.description and category.description != "":
node = doc.createElement("description")
node.appendChild(doc.createTextNode(category.description))
cat_node.appendChild(node)
langs = category.translated_description.keys()
langs.sort()
for lang in langs:
text = category.translated_description[lang].decode("UTF-8")
node = doc.createElement("description")
node.setAttribute("xml:lang", lang)
node.appendChild(doc.createTextNode(text))
cat_node.appendChild(node)
if category.display_order is not None:
display_node = doc.createElement("display_order")
display_node.appendChild(doc.createTextNode("%s" % category.display_order))
cat_node.appendChild(display_node)
grouplist_node = doc.createElement("grouplist")
groupids = sorted(groups)
for groupid in groupids:
node = doc.createElement("groupid")
node.appendChild(doc.createTextNode(groupid))
grouplist_node.appendChild(node)
cat_node.appendChild(grouplist_node)
# XXX
environments = self.comps.get_environments()
if environments:
for environment in environments:
groups = set(environment.groups) & set([i.groupid for i in self.comps.get_groups()])
if not groups:
continue
env_node = doc.createElement("environment")
msg_elem.appendChild(env_node)
id_node = doc.createElement("id")
id_node.appendChild(doc.createTextNode(environment.environmentid))
env_node.appendChild(id_node)
name_node = doc.createElement("name")
name_node.appendChild(doc.createTextNode(environment.name))
env_node.appendChild(name_node)
langs = environment.translated_name.keys()
langs.sort()
for lang in langs:
text = environment.translated_name[lang].decode("UTF-8")
node = doc.createElement("name")
node.setAttribute("xml:lang", lang)
node.appendChild(doc.createTextNode(text))
env_node.appendChild(node)
if environment.description:
node = doc.createElement("description")
node.appendChild(doc.createTextNode(environment.description))
env_node.appendChild(node)
langs = environment.translated_description.keys()
langs.sort()
for lang in langs:
text = environment.translated_description[lang].decode("UTF-8")
node = doc.createElement("description")
node.setAttribute("xml:lang", lang)
node.appendChild(doc.createTextNode(text))
env_node.appendChild(node)
if environment.display_order is not None:
display_node = doc.createElement("display_order")
display_node.appendChild(doc.createTextNode("%s" % environment.display_order))
env_node.appendChild(display_node)
grouplist_node = doc.createElement("grouplist")
groupids = sorted(groups)
for groupid in groupids:
node = doc.createElement("groupid")
node.appendChild(doc.createTextNode(groupid))
grouplist_node.appendChild(node)
env_node.appendChild(grouplist_node)
optionids = sorted(environment.options)
if optionids:
optionlist_node = doc.createElement("optionlist")
for optionid in optionids:
node = doc.createElement("groupid")
node.appendChild(doc.createTextNode(optionid))
optionlist_node.appendChild(node)
env_node.appendChild(optionlist_node)
# XXX
langpacks = self.comps.get_langpacks()
if langpacks:
lang_node = doc.createElement("langpacks")
msg_elem.appendChild(lang_node)
for langpack in langpacks:
match_node = doc.createElement("match")
match_node.setAttribute("name", langpack["name"])
match_node.setAttribute("install", langpack["install"])
lang_node.appendChild(match_node)
return doc
def _tweak_group(self, group_obj, group_dict):
if group_dict["default"] is not None:
group_obj.default = group_dict["default"]
if group_dict["uservisible"] is not None:
group_obj.uservisible = group_dict["uservisible"]
def _tweak_env(self, env_obj, env_dict):
if env_dict["display_order"] is not None:
env_obj.display_order = env_dict["display_order"]
else:
# write actual display order back to env_dict
env_dict["display_order"] = env_obj.display_order
# write group list back to env_dict
env_dict["groups"] = env_obj.groups[:]
def filter_groups(self, group_dicts):
"""Filter groups according to group definitions in group_dicts.
group_dicts = [{
"name": group ID,
"glob": True/False -- is "name" a glob?
"default: True/False/None -- if not None, set "default" accordingly
"uservisible": True/False/None -- if not None, set "uservisible" accordingly
}]
"""
to_remove = []
for group_obj in self.comps.groups:
found = False
for group_dict in group_dicts:
if group_dict["glob"]:
if fnmatch.fnmatch(group_obj.groupid, group_dict["name"]):
found = True
self._tweak_group(group_obj, group_dict)
break
else:
if group_obj.groupid == group_dict["name"]:
self._tweak_group(group_obj, group_dict)
found = True
break
if not found:
to_remove.append(group_obj.groupid)
if to_remove:
for key, value in self.comps._groups.items():
if key in to_remove:
del self.comps._groups[key]
def filter_packages(self, pkglist):
rv = []
for group_obj in self.comps.get_groups():
for package_type in ("mandatory", "default", "optional", "conditional"):
group_pkgs = getattr(group_obj, "%s_packages" % package_type)
pkg_names = group_pkgs.keys()
pkg_names.sort()
for pkg in pkg_names:
if pkg not in pkglist:
rv.append((pkg, group_obj.name))
del group_pkgs[pkg]
rv.sort()
return rv
def filter_categories(self, catlist=None, include_empty=False):
rv = []
if catlist is not None:
for categoryobj in self.comps.get_categories():
if categoryobj.categoryid not in catlist:
rv.append(categoryobj.categoryid)
del self.comps._categories[categoryobj.categoryid]
if not include_empty:
comps_groups = [group.groupid for group in self.comps.get_groups()]
for categoryobj in self.comps.get_categories():
matched = False
groupids = categoryobj.groups
groupids.sort()
for groupid in groupids:
if groupid not in comps_groups:
del categoryobj._groups[groupid]
else:
matched = True
if not matched:
rv.append(categoryobj.categoryid)
del self.comps._categories[categoryobj.categoryid]
rv.sort()
return rv
def filter_environments(self, env_dicts):
"""Filter environments according to group definitions in group_dicts.
env_dicts = [{
"name": environment ID,
"display_order: <int>/None -- if not None, set "display_order" accordingly
}]
"""
to_remove = []
for env_obj in self.comps.environments:
found = False
for env_dict in env_dicts:
if env_obj.environmentid == env_dict["name"]:
self._tweak_env(env_obj, env_dict)
found = True
break
if not found:
to_remove.append(env_obj.environmentid)
if to_remove:
for key, value in self.comps._environments.items():
if key in to_remove:
del self.comps._environments[key]
def injectpackages(self, pkglist):
def getgroup(pkgname):
if pkgname.endswith("-devel"):
return "compat-arch-development"
elif pkgname.endswith("libs"):
return "compat-arch-libs"
else:
return "compat-arch-support"
groups_dict = {}
for group_obj in self.comps.get_groups():
groupid = group_obj.groupid
groups_dict[groupid] = {"group_obj": group_obj}
pkggroup_dict = {
"compat-arch-development": [],
"compat-arch-libs": [],
"compat-arch-support": [],
}
for pkgname in pkglist:
group = getgroup(pkgname)
pkggroup_dict[group].append(pkgname)
for group_obj in self.comps.get_groups():
groupid = group_obj.groupid
for pkg in pkggroup_dict[groupid]:
if pkg not in group_obj.packages:
group_obj.default_packages[pkg] = 1
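A minimal usage sketch for CompsWrapper based on the methods above; the file names, group IDs and package list are placeholders:

comps = CompsWrapper("/tmp/comps-input.xml")
comps.filter_groups([
    {"name": "core", "glob": False, "default": True, "uservisible": True},
    {"name": "server-*", "glob": True, "default": None, "uservisible": None},
])
removed = comps.filter_packages(["bash", "kernel"])  # -> (package, group name) pairs dropped
comps.filter_categories(include_empty=False)  # drop categories left without groups
comps.write_comps(target_file="/tmp/comps-filtered.xml")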

View File

@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from kobo.shortcuts import force_list
class CreaterepoWrapper(object):
def __init__(self, createrepo_c=False):
if createrepo_c:
self.createrepo = "createrepo_c"
self.mergerepo = "mergerepo_c"
else:
self.createrepo = "createrepo"
self.mergerepo = "mergerepo"
self.modifyrepo = "modifyrepo"
def get_createrepo_cmd(self, directory, baseurl=None, outputdir=None, excludes=None, pkglist=None, groupfile=None, cachedir=None,
update=True, update_md_path=None, skip_stat=False, checkts=False, split=False, pretty=True, database=True, checksum=None,
unique_md_filenames=True, distro=None, content=None, repo=None, revision=None, deltas=False, oldpackagedirs=None,
num_deltas=None, workers=None):
# groupfile = /path/to/comps.xml
cmd = [self.createrepo]
cmd.append(directory)
if baseurl:
cmd.append("--baseurl=%s" % baseurl)
if outputdir:
cmd.append("--outputdir=%s" % outputdir)
if excludes:
for i in force_list(excludes):
cmd.append("--excludes=%s" % i)
if pkglist:
cmd.append("--pkglist=%s" % pkglist)
if groupfile:
cmd.append("--groupfile=%s" % groupfile)
if cachedir:
cmd.append("--cachedir=%s" % cachedir)
if update:
cmd.append("--update")
if update_md_path:
cmd.append("--update-md-path=%s" % update_md_path)
if skip_stat:
cmd.append("--skip-stat")
if checkts:
cmd.append("--checkts")
if split:
cmd.append("--split")
# HACK:
if "createrepo_c" in self.createrepo:
pretty = False
if pretty:
cmd.append("--pretty")
if database:
cmd.append("--database")
else:
cmd.append("--no-database")
if checksum:
cmd.append("--checksum=%s" % checksum)
if unique_md_filenames:
cmd.append("--unique-md-filenames")
else:
cmd.append("--simple-md-filenames")
if distro:
for i in force_list(distro):
cmd.append("--distro=%s" % i)
if content:
for i in force_list(content):
cmd.append("--content=%s" % i)
if repo:
for i in force_list(repo):
cmd.append("--repo=%s" % i)
if revision:
cmd.append("--revision=%s" % revision)
if deltas:
cmd.append("--deltas=%s" % deltas)
if oldpackagedirs:
for i in force_list(oldpackagedirs):
cmd.append("--oldpackagedirs=%s" % i)
if num_deltas:
cmd.append("--num-deltas=%d" % int(num_deltas))
if workers:
cmd.append("--workers=%d" % int(workers))
return cmd
def get_mergerepo_cmd(self, outputdir, repos, database=True, pkglist=None, nogroups=False, noupdateinfo=None):
cmd = [self.mergerepo]
cmd.append("--outputdir=%s" % outputdir)
for repo in repos:
if "://" not in repo:
repo = "file://" + repo
cmd.append("--repo=%s" % repo)
if database:
cmd.append("--database")
else:
cmd.append("--nodatabase")
# XXX: a custom mergerepo hack, not in upstream git repo
if pkglist:
cmd.append("--pkglist=%s" % pkglist)
if nogroups:
cmd.append("--nogroups")
if noupdateinfo:
cmd.append("--noupdateinfo")
return cmd
def get_modifyrepo_cmd(self, repo_path, file_path, mdtype=None, compress_type=None, remove=False):
cmd = [self.modifyrepo]
cmd.append(file_path)
cmd.append(repo_path)
if mdtype:
cmd.append("--mdtype=%s" % mdtype)
if remove:
cmd.append("--remove")
if compress_type:
cmd.append("--compress")
cmd.append("--compress-type=%s" % compress_type)
return cmd
def get_repoquery_cmd(self, repos, whatrequires=False, alldeps=False, packages=None, tempcache=True):
cmd = ["/usr/bin/repoquery"]
if tempcache:
cmd.append("--tempcache")
# a dict is expected: {repo_name: repo_path}
for repo_name in sorted(repos):
repo_path = repos[repo_name]
if "://" not in repo_path:
repo_path = "file://" + repo_path
cmd.append("--repofrompath=%s,%s" % (repo_name, repo_path))
cmd.append("--enablerepo=%s" % repo_name)
if whatrequires:
cmd.append("--whatrequires")
if alldeps:
cmd.append("--alldeps")
if packages:
for pkg in packages:
cmd.append(pkg)
return cmd
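A sketch of building and running a createrepo_c command with the wrapper; the paths are placeholders:

from kobo.shortcuts import run

repo = CreaterepoWrapper(createrepo_c=True)
cmd = repo.get_createrepo_cmd(
    "/compose/os/x86_64",  # placeholder tree
    groupfile="/compose/work/x86_64/comps.xml",
    checksum="sha256",
    workers=4,
)
run(cmd, show_cmd=True)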

378
pungi/wrappers/iso.py Normal file
View File

@ -0,0 +1,378 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import sys
import pipes
from fnmatch import fnmatch
import kobo.log
from kobo.shortcuts import force_list, relative_path, run
# HACK: define cmp in python3
if sys.version_info[0] == 3:
def cmp(a, b):
return (a > b) - (a < b)
class IsoWrapper(kobo.log.LoggingBase):
def get_boot_options(self, arch, createfrom, efi=True):
"""Checks to see what we need as the -b option for mkisofs"""
if arch in ("aarch64", ):
result = [
'-eltorito-alt-boot',
'-e', 'images/efiboot.img',
'-no-emul-boot',
]
return result
if arch in ("i386", "i686", "x86_64"):
result = [
'-b', 'isolinux/isolinux.bin',
'-c', 'isolinux/boot.cat',
'-no-emul-boot',
'-boot-load-size', '4',
'-boot-info-table',
]
# EFI args
if arch == "x86_64":
result.extend([
'-eltorito-alt-boot',
'-e', 'images/efiboot.img',
'-no-emul-boot',
])
return result
if arch == "ia64":
result = [
'-b', 'images/boot.img',
'-no-emul-boot',
]
return result
if arch in ("ppc", "ppc64", "ppc64le"):
result = [
'-part',
'-hfs',
'-r',
'-l',
'-sysid', 'PPC',
'-no-desktop',
'-allow-multidot',
'-chrp-boot',
"-map", os.path.join(createfrom, 'mapping'), # -map %s/ppc/mapping
"-magic", os.path.join(createfrom, 'magic'), # -magic %s/ppc/magic
'-hfs-bless', "/ppc/mac", # must be the last
]
return result
if arch == "sparc":
result = [
'-G', '/boot/isofs.b',
'-B', '...',
'-s', '/boot/silo.conf',
'-sparc-label', '"sparc"',
]
return result
if arch in ("s390", "s390x"):
result = [
# "-no-emul-boot",
# "-b", "images/cdboot.img",
# "-c", "boot.cat",
]
return result
raise ValueError("Unknown arch: %s" % arch)
def _truncate_volid(self, volid):
if len(volid) > 32:
old_volid = volid
volid = volid.replace("-", "")
self.log_warning("Truncating volume ID from '%s' to '%s'" % (old_volid, volid))
if len(volid) > 32:
old_volid = volid
volid = volid.replace(" ", "")
self.log_warning("Truncating volume ID from '%s' to '%s'" % (old_volid, volid))
if len(volid) > 32:
old_volid = volid
volid = volid.replace("Supplementary", "Supp")
self.log_warning("Truncating volume ID from '%s' to '%s'" % (old_volid, volid))
if len(volid) > 32:
raise ValueError("Volume ID must be less than 32 character: %s" % volid)
return volid
def get_mkisofs_cmd(self, iso, paths, appid=None, volid=None, volset=None, exclude=None, verbose=False, boot_args=None, input_charset="utf-8", graft_points=None):
# following options are always enabled
untranslated_filenames = True
translation_table = True
joliet = True
joliet_long = True
rock = True
cmd = ["/usr/bin/genisoimage"]
if appid:
cmd.extend(["-appid", appid])
if untranslated_filenames:
cmd.append("-untranslated-filenames")
if volid:
cmd.extend(["-volid", self._truncate_volid(volid)])
if joliet:
cmd.append("-J")
if joliet_long:
cmd.append("-joliet-long")
if volset:
cmd.extend(["-volset", volset])
if rock:
cmd.append("-rational-rock")
if verbose:
cmd.append("-verbose")
if translation_table:
cmd.append("-translation-table")
if input_charset:
cmd.extend(["-input-charset", input_charset])
if exclude:
for i in force_list(exclude):
cmd.extend(["-x", i])
if boot_args:
cmd.extend(boot_args)
cmd.extend(["-o", iso])
if graft_points:
cmd.append("-graft-points")
cmd.extend(["-path-list", graft_points])
else:
# we're either using graft points or file lists, not both
cmd.extend(force_list(paths))
return cmd
def get_implantisomd5_cmd(self, iso_path, supported=False):
cmd = ["/usr/bin/implantisomd5"]
if supported:
cmd.append("--supported-iso")
cmd.append(iso_path)
return cmd
def get_checkisomd5_cmd(self, iso_path, just_print=False):
cmd = ["/usr/bin/checkisomd5"]
if just_print:
cmd.append("--md5sumonly")
cmd.append(iso_path)
return cmd
def get_implanted_md5(self, iso_path):
cmd = self.get_checkisomd5_cmd(iso_path, just_print=True)
retcode, output = run(cmd)
line = output.splitlines()[0]
result = line.rsplit(":")[-1].strip()
return result
def get_checksum_cmds(self, iso_name, checksum_types=None):
checksum_types = checksum_types or ["md5", "sha1", "sha256"]
result = []
for checksum_type in checksum_types:
cmd = "%ssum -b %s > %s.%sSUM" % (checksum_type.lower(), pipes.quote(iso_name), pipes.quote(iso_name), checksum_type.upper())
result.append(cmd)
return result
def get_manifest_cmd(self, iso_name):
return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (pipes.quote(iso_name), pipes.quote(iso_name))
def get_volume_id(self, path):
cmd = ["isoinfo", "-d", "-i", path]
retcode, output = run(cmd)
for line in output.splitlines():
line = line.strip()
if line.startswith("Volume id:"):
return line[11:].strip()
raise RuntimeError("Could not read Volume ID")
def get_graft_points(self, paths, exclusive_paths=None, exclude=None):
# path priority in ascending order (1st = lowest prio)
# paths merge according to priority
# exclusive paths override whole dirs
result = {}
exclude = exclude or []
exclusive_paths = exclusive_paths or []
for i in paths:
if isinstance(i, dict):
tree = i
else:
tree = self._scan_tree(i)
result = self._merge_trees(result, tree)
for i in exclusive_paths:
tree = self._scan_tree(i)
result = self._merge_trees(result, tree, exclusive=True)
# TODO: exclude
return result
def _paths_from_list(self, root, paths):
root = os.path.abspath(root).rstrip("/") + "/"
result = {}
for i in paths:
i = os.path.normpath(os.path.join(root, i))
key = i[len(root):]
result[key] = i
return result
def _scan_tree(self, path):
path = os.path.abspath(path)
result = {}
for root, dirs, files in os.walk(path):
for f in files:
abspath = os.path.join(root, f)
relpath = relative_path(abspath, path.rstrip("/") + "/")
result[relpath] = abspath
# include empty dirs
if root != path:
abspath = os.path.join(root, "")
relpath = relative_path(abspath, path.rstrip("/") + "/")
result[relpath] = abspath
return result
def _merge_trees(self, tree1, tree2, exclusive=False):
# tree2 has higher priority
result = tree2.copy()
all_dirs = set([os.path.dirname(i).rstrip("/") for i in result if os.path.dirname(i) != ""])
for i in tree1:
dn = os.path.dirname(i)
if exclusive:
match = False
for a in all_dirs:
if dn == a or dn.startswith("%s/" % a):
match = True
break
if match:
continue
if i in result:
continue
result[i] = tree1[i]
return result
def write_graft_points(self, file_name, h, exclude=None):
exclude = exclude or []
result = {}
seen_dirs = set()
for i in sorted(h, reverse=True):
dn = os.path.dirname(i)
if not i.endswith("/"):
result[i] = h[i]
seen_dirs.add(dn)
continue
found = False
for j in seen_dirs:
if j.startswith(dn):
found = True
break
if not found:
result[i] = h[i]
seen_dirs.add(dn)
f = open(file_name, "w")
for i in sorted(result, cmp=cmp_graft_points):
# make sure all files required for boot come first,
# otherwise there may be problems with booting (large LBA address, etc.)
found = False
for excl in exclude:
if fnmatch(i, excl):
found = True
break
if found:
continue
f.write("%s=%s\n" % (i, h[i]))
f.close()
def _is_rpm(path):
if path.endswith(".rpm"):
return True
return False
def _is_image(path):
if path.startswith("images/"):
return True
if path.startswith("isolinux/"):
return True
if path.startswith("EFI/"):
return True
if path.startswith("etc/"):
return True
if path.startswith("ppc/"):
return True
if path.endswith(".img"):
return True
if path.endswith(".ins"):
return True
return False
def cmp_graft_points(x, y):
x_is_rpm = _is_rpm(x)
y_is_rpm = _is_rpm(y)
x_is_image = _is_image(x)
y_is_image = _is_image(y)
if x_is_rpm and y_is_rpm:
return cmp(x, y)
if x_is_rpm:
return 1
if y_is_rpm:
return -1
if x_is_image and y_is_image:
return cmp(x, y)
if x_is_image:
return -1
if y_is_image:
return 1
return cmp(x, y)
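A sketch of the intended flow: merge trees into graft points, write them out sorted by cmp_graft_points() (boot files first, RPMs last, so large LBA addresses never hit boot files), then build the mkisofs command; all paths are placeholders:

iso = IsoWrapper()
graft = iso.get_graft_points(["/compose/os/x86_64", "/tmp/extra_files"])
iso.write_graft_points("/tmp/graft-points", graft, exclude=["*/TRANS.TBL", "*/boot.cat"])
cmd = iso.get_mkisofs_cmd(
    "/tmp/output.iso", None,  # paths unused when graft_points is given
    volid="RHEL-7.1 Server.x86_64",
    boot_args=iso.get_boot_options("x86_64", None),  # createfrom only matters on ppc
    graft_points="/tmp/graft-points",
)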

67
pungi/wrappers/jigdo.py Normal file
View File

@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import kobo.log
from kobo.shortcuts import force_list
class JigdoWrapper(kobo.log.LoggingBase):
def get_jigdo_cmd(self, image, files, output_dir, cache=None, no_servers=False, report=None):
"""
files: [{"path", "label", "uri"}]
"""
cmd = ["jigdo-file", "make-template"]
cmd.append("--force") # overrides existing template
image = os.path.abspath(image)
cmd.append("--image=%s" % image)
output_dir = os.path.abspath(output_dir)
jigdo_file = os.path.join(output_dir, os.path.basename(image)) + ".jigdo"
cmd.append("--jigdo=%s" % jigdo_file)
template_file = os.path.join(output_dir, os.path.basename(image)) + ".template"
cmd.append("--template=%s" % template_file)
if cache:
cache = os.path.abspath(cache)
cmd.append("--cache=%s" % cache)
if no_servers:
cmd.append("--no-servers-section")
if report:
cmd.append("--report=%s" % report)
for i in force_list(files):
# double-slash magic; read man jigdo-file
if isinstance(i, str):
i = {"path": i}
path = os.path.abspath(i["path"]).rstrip("/") + "//"
cmd.append(path)
label = i.get("label", None)
if label is not None:
cmd.append("--label=%s=%s" % (label, path.rstrip("/")))
uri = i.get("uri", None)
if uri is not None:
cmd.append("--uri=%s=%s" % (label, uri))
return cmd
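
A minimal usage sketch (ISO path, label and URI are invented); the wrapper only assembles the argv list, the caller is expected to run it:

wrapper = JigdoWrapper()
cmd = wrapper.get_jigdo_cmd(
    image="/compose/iso/Server-x86_64.iso",
    files=[{"path": "/compose/os", "label": "Server",
            "uri": "http://example.com/os"}],
    output_dir="/compose/jigdo",
    no_servers=True,
)
# -> ['jigdo-file', 'make-template', '--force',
#     '--image=/compose/iso/Server-x86_64.iso',
#     '--jigdo=/compose/jigdo/Server-x86_64.iso.jigdo',
#     '--template=/compose/jigdo/Server-x86_64.iso.template',
#     '--no-servers-section', '/compose/os//',
#     '--label=Server=/compose/os',
#     '--uri=Server=http://example.com/os']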


@ -0,0 +1,206 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import pipes
import re
import koji
import rpmUtils.arch
from kobo.shortcuts import run
class KojiWrapper(object):
def __init__(self, profile):
self.profile = profile
# assumption: profile name equals executable name (it's a symlink -> koji)
self.executable = self.profile.replace("_", "-")
self.koji_module = __import__(self.profile)
def get_runroot_cmd(self, target, arch, command, quiet=False, use_shell=True, channel=None, packages=None, mounts=None, weight=None, task_id=True):
cmd = [self.executable, "runroot"]
if quiet:
cmd.append("--quiet")
if use_shell:
cmd.append("--use-shell")
if task_id:
cmd.append("--task-id")
if channel:
cmd.append("--channel-override=%s" % channel)
else:
cmd.append("--channel-override=runroot-local")
if weight:
cmd.append("--weight=%s" % int(weight))
if packages:
for package in packages:
cmd.append("--package=%s" % package)
if mounts:
for mount in mounts:
# directories are *not* created here
cmd.append("--mount=%s" % mount)
# IMPORTANT: all --opts have to be provided *before* args
cmd.append(target)
# i686 -> i386 etc.
arch = rpmUtils.arch.getBaseArch(arch)
cmd.append(arch)
if isinstance(command, list):
command = " ".join([pipes.quote(i) for i in command])
# HACK: remove rpmdb and yum cache
command = "rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; " + command
cmd.append(command)
return cmd
def run_runroot_cmd(self, command, log_file=None):
# runroot is blocking -> you probably want to run it in a thread
task_id = None
retcode, output = run(command, can_fail=True, logfile=log_file)
if "--task-id" in command:
task_id = int(output.splitlines()[0])
output_ends_with_eol = output.endswith("\n")
output = "\n".join(output.splitlines()[1:])
if output_ends_with_eol:
output += "\n"
result = {
"retcode": retcode,
"output": output,
"task_id": task_id,
}
return result
def get_create_image_cmd(self, name, version, target, arch, ks_file, repos, image_type="live", image_format=None, release=None, wait=True, archive=False):
# Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
# Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>
# Examples:
# * name: RHEL-7.0
# * name: Satellite-6.0.1-RHEL-6
# ** -<type>.<arch>
# * version: YYYYMMDD[.n|.t].X
# * release: 1
cmd = [self.executable]
if image_type == "live":
cmd.append("spin-livecd")
elif image_type == "appliance":
cmd.append("spin-appliance")
else:
raise ValueError("Invalid image type: %s" % image_type)
if not archive:
cmd.append("--scratch")
cmd.append("--noprogress")
if wait:
cmd.append("--wait")
else:
cmd.append("--nowait")
if isinstance(repos, list):
for repo in repos:
cmd.append("--repo=%s" % repo)
else:
cmd.append("--repo=%s" % repos)
if image_format:
if image_type != "appliance":
raise ValueError("Format can be specified only for appliance images'")
supported_formats = ["raw", "qcow", "qcow2", "vmx"]
if image_format not in supported_formats:
raise ValueError("Format is not supported: %s. Supported formats: %s" % (image_format, " ".join(sorted(supported_formats))))
cmd.append("--format=%s" % image_format)
if release is not None:
cmd.append("--release=%s" % release)
# IMPORTANT: all --opts have to be provided *before* args
# Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
cmd.append(name)
cmd.append(version)
cmd.append(target)
# i686 -> i386 etc.
arch = rpmUtils.arch.getBaseArch(arch)
cmd.append(arch)
cmd.append(ks_file)
return cmd
def run_create_image_cmd(self, command, log_file=None):
# spin-{livecd,appliance} is blocking by default -> you probably want to run it in a thread
retcode, output = run(command, can_fail=True, logfile=log_file)
match = re.search(r"Created task: (\d+)", output)
if not match:
raise RuntimeError("Could not find task ID in output")
result = {
"retcode": retcode,
"output": output,
"task_id": int(match.groups()[0]),
}
return result
def get_image_path(self, task_id):
result = []
# XXX: hardcoded URL
koji_proxy = self.koji_module.ClientSession(self.koji_module.config.server)
task_info_list = []
task_info_list.append(koji_proxy.getTaskInfo(task_id, request=True))
task_info_list.extend(koji_proxy.getTaskChildren(task_id, request=True))
# scan parent and child tasks for certain methods
task_info = None
for i in task_info_list:
if i["method"] in ("createAppliance", "createLiveCD"):
task_info = i
break
        if task_info is None:
            raise RuntimeError("Could not find createAppliance or createLiveCD task under task %s" % task_id)
        scratch = task_info["request"][-1].get("scratch", False)
task_result = koji_proxy.getTaskResult(task_info["id"])
task_result.pop("rpmlist", None)
if scratch:
topdir = os.path.join(self.koji_module.pathinfo.work(), self.koji_module.pathinfo.taskrelpath(task_info["id"]))
else:
build = koji_proxy.getImageBuild("%(name)s-%(version)s-%(release)s" % task_result)
build["name"] = task_result["name"]
build["version"] = task_result["version"]
build["release"] = task_result["release"]
build["arch"] = task_result["arch"]
topdir = self.koji_module.pathinfo.imagebuild(build)
for i in task_result["files"]:
result.append(os.path.join(topdir, i))
return result
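
A sketch of the intended call sequence (the profile name, build target and paths are assumptions, not part of this commit); both run_* helpers block until the koji task finishes, so callers typically run them in a thread:

koji = KojiWrapper("koji")
runroot_cmd = koji.get_runroot_cmd(
    "f23-build", "x86_64",
    ["rm", "-rf", "/mnt/koji/compose/work"],
    channel="compose", packages=["coreutils"],
    mounts=["/mnt/koji/compose"], weight=2,
)
result = koji.run_runroot_cmd(runroot_cmd, log_file="/tmp/runroot.log")
# result is a dict with "retcode", "output" and "task_id" keys

live_cmd = koji.get_create_image_cmd(
    "Fedora-Live", "23", "f23-candidate", "x86_64",
    "/compose/work/livecd.ks", repos=["http://example.com/os"],
)
task_id = koji.run_create_image_cmd(live_cmd)["task_id"]
image_paths = koji.get_image_path(task_id)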

98
pungi/wrappers/lorax.py Normal file

@ -0,0 +1,98 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
from kobo.shortcuts import force_list
class LoraxWrapper(object):
def get_lorax_cmd(self, product, version, release, repo_baseurl, output_dir, variant=None, bugurl=None, nomacboot=False, noupgrade=False, is_final=False, buildarch=None, volid=None):
cmd = ["lorax"]
cmd.append("--product=%s" % product)
cmd.append("--version=%s" % version)
cmd.append("--release=%s" % release)
for i in force_list(repo_baseurl):
if "://" not in i:
i = "file://%s" % os.path.abspath(i)
cmd.append("--source=%s" % i)
if variant is not None:
cmd.append("--variant=%s" % variant)
if bugurl is not None:
cmd.append("--bugurl=%s" % variant)
if nomacboot:
cmd.append("--nomacboot")
if noupgrade:
cmd.append("--noupgrade")
if is_final:
cmd.append("--isfinal")
if buildarch:
cmd.append("--buildarch=%s" % buildarch)
if volid:
cmd.append("--volid=%s" % volid)
output_dir = os.path.abspath(output_dir)
cmd.append(output_dir)
# TODO: workdir
return cmd
def get_buildinstall_cmd(self, product, version, release, repo_baseurl, output_dir, variant=None, bugurl=None, nomacboot=False, noupgrade=False, is_final=False, buildarch=None, volid=None, brand=None):
# RHEL 6 compatibility
# Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root>
brand = brand or "redhat"
# HACK: ignore provided release
release = "%s %s" % (brand, version)
bugurl = bugurl or "https://bugzilla.redhat.com"
cmd = ["/usr/lib/anaconda-runtime/buildinstall"]
cmd.append("--debug")
cmd.extend(["--version", version])
cmd.extend(["--brand", brand])
cmd.extend(["--product", product])
cmd.extend(["--release", release])
if is_final:
cmd.append("--final")
if buildarch:
cmd.extend(["--buildarch", buildarch])
if bugurl:
cmd.extend(["--bugurl", bugurl])
output_dir = os.path.abspath(output_dir)
cmd.extend(["--output", output_dir])
for i in force_list(repo_baseurl):
if "://" not in i:
i = "file://%s" % os.path.abspath(i)
cmd.append(i)
return cmd
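
Both methods only assemble an argv list; a minimal sketch with invented values (note how a local source path gets a file:// prefix):

lorax = LoraxWrapper()
cmd = lorax.get_lorax_cmd(
    "Fedora", "23", "23", "/compose/Server/x86_64/os",
    "/compose/work/x86_64/buildinstall",
    variant="Server", is_final=True, volid="Fedora-S-23-x86_64",
)
# -> ['lorax', '--product=Fedora', '--version=23', '--release=23',
#     '--source=file:///compose/Server/x86_64/os', '--variant=Server',
#     '--isfinal', '--volid=Fedora-S-23-x86_64',
#     '/compose/work/x86_64/buildinstall']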

203
pungi/wrappers/pungi.py Normal file

@ -0,0 +1,203 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import errno
import os
import re
PACKAGES_RE = {
"rpm": re.compile(r"^RPM(\((?P<flags>[^\)]+)\))?: (?:file://)?(?P<path>/?[^ ]+)$"),
"srpm": re.compile(r"^SRPM(\((?P<flags>[^\)]+)\))?: (?:file://)?(?P<path>/?[^ ]+)$"),
"debuginfo": re.compile(r"^DEBUGINFO(\((?P<flags>[^\)]+)\))?: (?:file://)?(?P<path>/?[^ ]+)$"),
}
UNRESOLVED_DEPENDENCY_RE = re.compile(r"^.*Unresolvable dependency (.+) in ([^ ]+).*$")
class PungiWrapper(object):
def write_kickstart(self, ks_path, repos, groups, packages, exclude_packages=None, comps_repo=None, lookaside_repos=None, fulltree_excludes=None, multilib_blacklist=None, multilib_whitelist=None, prepopulate=None):
groups = groups or []
exclude_packages = exclude_packages or {}
lookaside_repos = lookaside_repos or {}
# repos = {name: url}
fulltree_excludes = fulltree_excludes or set()
multilib_blacklist = multilib_blacklist or set()
multilib_whitelist = multilib_whitelist or set()
ks_path = os.path.abspath(ks_path)
ks_dir = os.path.dirname(ks_path)
try:
os.makedirs(ks_dir)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
kickstart = open(ks_path, "w")
# repos
for repo_name, repo_url in repos.items() + lookaside_repos.items():
if "://" not in repo_url:
repo_url = "file://" + os.path.abspath(repo_url)
repo_str = "repo --name=%s --baseurl=%s" % (repo_name, repo_url)
# TODO: make sure pungi works when there are no comps in repodata
# XXX: if groups are ignored, langpacks are ignored too
if comps_repo and repo_name != comps_repo:
repo_str += " --ignoregroups=true"
kickstart.write(repo_str + "\n")
# %packages
kickstart.write("\n")
kickstart.write("%packages\n")
for group in sorted(groups):
kickstart.write("@%s --optional\n" % group)
for package in sorted(packages):
kickstart.write("%s\n" % package)
for package in sorted(exclude_packages):
kickstart.write("-%s\n" % package)
kickstart.write("%end\n")
# %fulltree-excludes
if fulltree_excludes:
kickstart.write("\n")
kickstart.write("%fulltree-excludes\n")
for i in sorted(fulltree_excludes):
kickstart.write("%s\n" % i)
kickstart.write("%end\n")
# %multilib-blacklist
if multilib_blacklist:
kickstart.write("\n")
kickstart.write("%multilib-blacklist\n")
for i in sorted(multilib_blacklist):
kickstart.write("%s\n" % i)
kickstart.write("%end\n")
# %multilib-whitelist
if multilib_whitelist:
kickstart.write("\n")
kickstart.write("%multilib-whitelist\n")
for i in sorted(multilib_whitelist):
kickstart.write("%s\n" % i)
kickstart.write("%end\n")
# %prepopulate
if prepopulate:
kickstart.write("\n")
kickstart.write("%prepopulate\n")
for i in sorted(prepopulate):
kickstart.write("%s\n" % i)
kickstart.write("%end\n")
kickstart.close()
def get_pungi_cmd(self, config, destdir, name, version=None, flavor=None, selfhosting=False, fulltree=False, greedy=None, nodeps=False, nodownload=True, full_archlist=False, arch=None, cache_dir=None, lookaside_repos=None, multilib_methods=None):
cmd = ["pungi-gather"]
# Gather stage
cmd.append("-G")
# path to a kickstart file
cmd.append("--config=%s" % config)
        # destdir is optional in Pungi (defaults to the current directory), but we want it to be mandatory here
cmd.append("--destdir=%s" % destdir)
# name
cmd.append("--name=%s" % name)
# version; optional, defaults to datestamp
if version:
cmd.append("--ver=%s" % version)
# rhel variant; optional
if flavor:
cmd.append("--flavor=%s" % flavor)
# turn selfhosting on
if selfhosting:
cmd.append("--selfhosting")
        # fulltree, a.k.a. NPLB ("no package left behind")
if fulltree:
cmd.append("--fulltree")
greedy = greedy or "none"
cmd.append("--greedy=%s" % greedy)
if nodeps:
cmd.append("--nodeps")
# don't download packages, just print paths
if nodownload:
cmd.append("--nodownload")
if full_archlist:
cmd.append("--full-archlist")
if arch:
cmd.append("--arch=%s" % arch)
if multilib_methods:
for i in multilib_methods:
cmd.append("--multilib=%s" % i)
if cache_dir:
cmd.append("--cachedir=%s" % cache_dir)
if lookaside_repos:
for i in lookaside_repos:
cmd.append("--lookaside-repo=%s" % i)
return cmd
def get_packages(self, output):
result = dict(((i, []) for i in PACKAGES_RE))
for line in output.splitlines():
for file_type, pattern in PACKAGES_RE.iteritems():
match = pattern.match(line)
if match:
item = {}
item["path"] = match.groupdict()["path"]
flags = match.groupdict()["flags"] or ""
flags = sorted([i.strip() for i in flags.split(",") if i.strip()])
item["flags"] = flags
result[file_type].append(item)
break
# no packages are filtered
return result
def get_missing_deps(self, output):
result = {}
for line in output.splitlines():
match = UNRESOLVED_DEPENDENCY_RE.match(line)
if match:
result.setdefault(match.group(2), set()).add(match.group(1))
return result
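
A sketch of the round trip (paths invented): write_kickstart() produces the input for pungi-gather, get_pungi_cmd() builds the command, and get_packages() parses the captured --nodownload output. The sample RPM lines below are made up but match PACKAGES_RE:

pw = PungiWrapper()
pw.write_kickstart(
    "/compose/work/pungi.ks",
    repos={"Server": "/compose/Server/x86_64/os"},
    groups=["core"], packages=["bash"],
)
cmd = pw.get_pungi_cmd("/compose/work/pungi.ks", "/compose/work",
                       "Fedora", arch="x86_64", nodownload=True)
# after running cmd and capturing its stdout as `output`:
output = "\n".join([
    "RPM(leaf): file:///repo/Packages/bash-4.3-1.x86_64.rpm",
    "SRPM: /repo/source/SRPMS/bash-4.3-1.src.rpm",
])
print(pw.get_packages(output))
# {'rpm': [{'path': '/repo/Packages/bash-4.3-1.x86_64.rpm', 'flags': ['leaf']}],
#  'srpm': [{'path': '/repo/source/SRPMS/bash-4.3-1.src.rpm', 'flags': []}],
#  'debuginfo': []}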


@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
from kobo.shortcuts import force_list
class RepoclosureWrapper(object):
def __init__(self):
self.actual_id = 0
def get_repoclosure_cmd(self, config=None, arch=None, basearch=None, builddeps=False,
repos=None, lookaside=None, tempcache=False, quiet=False, newest=False, pkg=None, group=None):
cmd = ["/usr/bin/repoclosure"]
if config:
cmd.append("--config=%s" % config)
if arch:
for i in force_list(arch):
cmd.append("--arch=%s" % i)
if basearch:
cmd.append("--basearch=%s" % basearch)
if builddeps:
cmd.append("--builddeps")
if tempcache:
cmd.append("--tempcache")
if quiet:
cmd.append("--quiet")
if newest:
cmd.append("--newest")
repos = repos or {}
for repo_id, repo_path in repos.iteritems():
if "://" not in repo_path:
repo_path = "file://%s" % os.path.abspath(repo_path)
cmd.append("--repofrompath=%s,%s" % (repo_id, repo_path))
cmd.append("--repoid=%s" % repo_id)
lookaside = lookaside or {}
for repo_id, repo_path in lookaside.iteritems():
if "://" not in repo_path:
repo_path = "file://%s" % os.path.abspath(repo_path)
cmd.append("--repofrompath=%s,%s" % (repo_id, repo_path))
cmd.append("--lookaside=%s" % repo_id)
if pkg:
cmd.append("--pkg=%s" % pkg)
if group:
cmd.append("--group=%s" % group)
return cmd
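
A minimal sketch (repo ids and paths invented); local paths are turned into file:// URLs automatically:

rc = RepoclosureWrapper()
cmd = rc.get_repoclosure_cmd(
    arch="x86_64",
    repos={"Server": "/compose/Server/x86_64/os"},
    lookaside={"Server-optional": "http://example.com/optional"},
    newest=True,
)
# -> ['/usr/bin/repoclosure', '--arch=x86_64', '--newest',
#     '--repofrompath=Server,file:///compose/Server/x86_64/os',
#     '--repoid=Server',
#     '--repofrompath=Server-optional,http://example.com/optional',
#     '--lookaside=Server-optional']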

262
pungi/wrappers/scm.py Normal file

@ -0,0 +1,262 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import tempfile
import shutil
import pipes
import glob
import time
import kobo.log
from kobo.shortcuts import run, force_list
from pypungi.util import explode_rpm_package, makedirs
class ScmBase(kobo.log.LoggingBase):
def __init__(self, logger=None):
kobo.log.LoggingBase.__init__(self, logger=logger)
def _create_temp_dir(self, tmp_dir=None):
if tmp_dir is not None:
makedirs(tmp_dir)
return tempfile.mkdtemp(prefix="cvswrapper_", dir=tmp_dir)
def _delete_temp_dir(self, tmp_dir):
self.log_debug("Removing %s" % tmp_dir)
try:
shutil.rmtree(tmp_dir)
except OSError as ex:
self.log_warning("Error removing %s: %s" % (tmp_dir, ex))
def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
        raise NotImplementedError
def retry_run(self, cmd, retries=5, timeout=60, **kwargs):
"""
@param cmd - cmd passed to kobo.shortcuts.run()
@param retries=5 - attempt to execute n times
@param timeout=60 - seconds before next try
@param **kwargs - args passed to kobo.shortcuts.run()
"""
for n in range(1, retries + 1):
try:
self.log_debug("Retrying execution %s/%s of '%s'" % (n, retries, cmd))
return run(cmd, **kwargs)
except RuntimeError as ex:
if n == retries:
raise ex
self.log_debug("Waiting %s seconds to retry execution of '%s'" % (timeout, cmd))
time.sleep(timeout)
raise RuntimeError("Something went wrong during execution of '%s'" % cmd)
class FileWrapper(ScmBase):
def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
if scm_root:
raise ValueError("FileWrapper: 'scm_root' should be empty.")
dirs = glob.glob(scm_dir)
for i in dirs:
run("cp -a %s/* %s/" % (pipes.quote(i), pipes.quote(target_dir)))
def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
if scm_root:
raise ValueError("FileWrapper: 'scm_root' should be empty.")
files = glob.glob(scm_file)
for i in files:
target_path = os.path.join(target_dir, os.path.basename(i))
shutil.copy2(i, target_path)
class CvsWrapper(ScmBase):
def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
scm_dir = scm_dir.lstrip("/")
scm_branch = scm_branch or "HEAD"
tmp_dir = self._create_temp_dir(tmp_dir=tmp_dir)
self.log_debug("Exporting directory %s from CVS %s (branch %s)..." % (scm_dir, scm_root, scm_branch))
self.retry_run(["/usr/bin/cvs", "-q", "-d", scm_root, "export", "-r", scm_branch, scm_dir], workdir=tmp_dir, show_cmd=True, logfile=log_file)
# TODO: hidden files
run("cp -a %s/* %s/" % (pipes.quote(os.path.join(tmp_dir, scm_dir)), pipes.quote(target_dir)))
self._delete_temp_dir(tmp_dir)
def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
scm_file = scm_file.lstrip("/")
scm_branch = scm_branch or "HEAD"
tmp_dir = self._create_temp_dir(tmp_dir=tmp_dir)
target_path = os.path.join(target_dir, os.path.basename(scm_file))
self.log_debug("Exporting file %s from CVS %s (branch %s)..." % (scm_file, scm_root, scm_branch))
self.retry_run(["/usr/bin/cvs", "-q", "-d", scm_root, "export", "-r", scm_branch, scm_file], workdir=tmp_dir, show_cmd=True, logfile=log_file)
makedirs(target_dir)
shutil.copy2(os.path.join(tmp_dir, scm_file), target_path)
self._delete_temp_dir(tmp_dir)
class GitWrapper(ScmBase):
def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
scm_dir = scm_dir.lstrip("/")
scm_branch = scm_branch or "master"
tmp_dir = self._create_temp_dir(tmp_dir=tmp_dir)
if "://" not in scm_root:
scm_root = "file://%s" % scm_root
self.log_debug("Exporting directory %s from git %s (branch %s)..." % (scm_dir, scm_root, scm_branch))
cmd = "/usr/bin/git archive --remote=%s %s %s | tar xf -" % (pipes.quote(scm_root), pipes.quote(scm_branch), pipes.quote(scm_dir))
self.retry_run(cmd, workdir=tmp_dir, show_cmd=True, logfile=log_file)
run("cp -a %s/* %s/" % (pipes.quote(os.path.join(tmp_dir, scm_dir)), pipes.quote(target_dir)))
self._delete_temp_dir(tmp_dir)
def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
scm_file = scm_file.lstrip("/")
scm_branch = scm_branch or "master"
tmp_dir = self._create_temp_dir(tmp_dir=tmp_dir)
target_path = os.path.join(target_dir, os.path.basename(scm_file))
if "://" not in scm_root:
scm_root = "file://%s" % scm_root
self.log_debug("Exporting file %s from git %s (branch %s)..." % (scm_file, scm_root, scm_branch))
cmd = "/usr/bin/git archive --remote=%s %s %s | tar xf -" % (pipes.quote(scm_root), pipes.quote(scm_branch), pipes.quote(scm_file))
self.retry_run(cmd, workdir=tmp_dir, show_cmd=True, logfile=log_file)
makedirs(target_dir)
shutil.copy2(os.path.join(tmp_dir, scm_file), target_path)
self._delete_temp_dir(tmp_dir)
class RpmScmWrapper(ScmBase):
def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
# if scm_root is a list, recursively process all RPMs
if isinstance(scm_root, list):
for i in scm_root:
self.export_dir(i, scm_dir, target_dir, scm_branch, tmp_dir, log_file)
return
# if scm_root is a glob, recursively process all RPMs
rpms = glob.glob(scm_root)
if len(rpms) > 1 or (rpms and rpms[0] != scm_root):
for i in rpms:
self.export_dir(i, scm_dir, target_dir, scm_branch, tmp_dir, log_file)
return
scm_dir = scm_dir.lstrip("/")
tmp_dir = self._create_temp_dir(tmp_dir=tmp_dir)
self.log_debug("Extracting directory %s from RPM package %s..." % (scm_dir, scm_root))
explode_rpm_package(scm_root, tmp_dir)
makedirs(target_dir)
# "dir" includes the whole directory while "dir/" includes it's content
if scm_dir.endswith("/"):
run("cp -a %s/* %s/" % (pipes.quote(os.path.join(tmp_dir, scm_dir)), pipes.quote(target_dir)))
else:
run("cp -a %s %s/" % (pipes.quote(os.path.join(tmp_dir, scm_dir)), pipes.quote(target_dir)))
self._delete_temp_dir(tmp_dir)
def export_file(self, scm_root, scm_file, target_dir, scm_branch=None, tmp_dir=None, log_file=None):
# if scm_root is a list, recursively process all RPMs
if isinstance(scm_root, list):
for i in scm_root:
self.export_file(i, scm_file, target_dir, scm_branch, tmp_dir, log_file)
return
# if scm_root is a glob, recursively process all RPMs
rpms = glob.glob(scm_root)
if len(rpms) > 1 or (rpms and rpms[0] != scm_root):
for i in rpms:
self.export_file(i, scm_file, target_dir, scm_branch, tmp_dir, log_file)
return
scm_file = scm_file.lstrip("/")
tmp_dir = self._create_temp_dir(tmp_dir=tmp_dir)
self.log_debug("Exporting file %s from RPM file %s..." % (scm_file, scm_root))
explode_rpm_package(scm_root, tmp_dir)
makedirs(target_dir)
for src in glob.glob(os.path.join(tmp_dir, scm_file)):
dst = os.path.join(target_dir, os.path.basename(src))
shutil.copy2(src, dst)
self._delete_temp_dir(tmp_dir)
def get_file_from_scm(scm_dict, target_path, logger=None):
if isinstance(scm_dict, str):
scm_type = "file"
scm_repo = None
scm_file = os.path.abspath(scm_dict)
scm_branch = None
else:
scm_type = scm_dict["scm"]
scm_repo = scm_dict["repo"]
scm_file = scm_dict["file"]
scm_branch = scm_dict.get("branch", None)
if scm_type == "file":
scm = FileWrapper(logger=logger)
elif scm_type == "cvs":
scm = CvsWrapper(logger=logger)
elif scm_type == "git":
scm = GitWrapper(logger=logger)
elif scm_type == "rpm":
scm = RpmScmWrapper(logger=logger)
else:
raise ValueError("Unknown SCM type: %s" % scm_type)
for i in force_list(scm_file):
tmp_dir = tempfile.mkdtemp(prefix="scm_checkout_")
scm.export_file(scm_repo, i, scm_branch=scm_branch, target_dir=tmp_dir)
makedirs(target_path)
run("cp -a %s/* %s/" % (pipes.quote(tmp_dir), pipes.quote(target_path)))
shutil.rmtree(tmp_dir)
def get_dir_from_scm(scm_dict, target_path, logger=None):
if isinstance(scm_dict, str):
scm_type = "file"
scm_repo = None
scm_dir = os.path.abspath(scm_dict)
scm_branch = None
else:
scm_type = scm_dict["scm"]
scm_repo = scm_dict.get("repo", None)
scm_dir = scm_dict["dir"]
scm_branch = scm_dict.get("branch", None)
if scm_type == "file":
scm = FileWrapper(logger=logger)
elif scm_type == "cvs":
scm = CvsWrapper(logger=logger)
elif scm_type == "git":
scm = GitWrapper(logger=logger)
elif scm_type == "rpm":
scm = RpmScmWrapper(logger=logger)
else:
raise ValueError("Unknown SCM type: %s" % scm_type)
tmp_dir = tempfile.mkdtemp(prefix="scm_checkout_")
scm.export_dir(scm_repo, scm_dir, scm_branch=scm_branch, target_dir=tmp_dir)
# TODO: hidden files
makedirs(target_path)
run("cp -a %s/* %s/" % (pipes.quote(tmp_dir), pipes.quote(target_path)))
shutil.rmtree(tmp_dir)
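
A sketch of the two dict-based entry points (repo URL, file names and target paths are invented); get_dir_from_scm() takes the same keys with "dir" in place of "file", and both fall back to plain file copying when given a string instead of a dict:

get_file_from_scm(
    {"scm": "git", "repo": "git://example.com/comps.git",
     "file": "comps-f23.xml.in", "branch": "f23"},
    "/compose/work/comps",
)
get_dir_from_scm("/srv/local-config", "/compose/work/config")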

304
pungi/wrappers/variants.py Executable file

@ -0,0 +1,304 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from __future__ import print_function
import os
import sys
import copy
import lxml.etree
# HACK: define cmp in python3
if sys.version_info[0] == 3:
def cmp(a, b):
return (a > b) - (a < b)
VARIANTS_DTD = "/usr/share/pungi/variants.dtd"
if not os.path.isfile(VARIANTS_DTD):
DEVEL_VARIANTS_DTD = os.path.normpath(os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "..", "share", "variants.dtd")))
msg = "Variants DTD not found: %s" % VARIANTS_DTD
if os.path.isfile(DEVEL_VARIANTS_DTD):
sys.stderr.write("%s\n" % msg)
sys.stderr.write("Using alternative DTD: %s\n" % DEVEL_VARIANTS_DTD)
VARIANTS_DTD = DEVEL_VARIANTS_DTD
else:
raise RuntimeError(msg)
class VariantsXmlParser(object):
def __init__(self, file_obj, tree_arches=None):
self.tree = lxml.etree.parse(file_obj)
self.dtd = lxml.etree.DTD(open(VARIANTS_DTD, "r"))
self.addons = {}
self.layered_products = {}
self.tree_arches = tree_arches
self.validate()
def _is_true(self, value):
if value == "true":
return True
if value == "false":
return False
raise ValueError("Invalid boolean value in variants XML: %s" % value)
def validate(self):
if not self.dtd.validate(self.tree):
errors = [str(i) for i in self.dtd.error_log.filter_from_errors()]
raise ValueError("Variants XML doesn't validate:\n%s" % "\n".join(errors))
def parse_variant_node(self, variant_node):
variant_dict = {
"id": str(variant_node.attrib["id"]),
"name": str(variant_node.attrib["name"]),
"name": str(variant_node.attrib["name"]),
"type": str(variant_node.attrib["type"]),
"arches": [str(i) for i in variant_node.xpath("arches/arch/text()")],
"groups": [],
"environments": [],
}
if self.tree_arches:
variant_dict["arches"] = [i for i in variant_dict["arches"] if i in self.tree_arches]
for grouplist_node in variant_node.xpath("groups"):
for group_node in grouplist_node.xpath("group"):
group = {
"name": str(group_node.text),
"glob": self._is_true(group_node.attrib.get("glob", "false")),
"default": None,
"uservisible": None,
}
default = group_node.attrib.get("default")
if default is not None:
group["default"] = self._is_true(default)
uservisible = group_node.attrib.get("uservisible")
if uservisible is not None:
group["uservisible"] = self._is_true(uservisible)
variant_dict["groups"].append(group)
for environments_node in variant_node.xpath("environments"):
for environment_node in environments_node.xpath("environment"):
environment = {
"name": str(environment_node.text),
"display_order": None,
}
display_order = environment_node.attrib.get("display_order")
if display_order is not None:
environment["display_order"] = int(display_order)
variant_dict["environments"].append(environment)
variant = Variant(**variant_dict)
if variant.type == "layered-product":
product_node = variant_node.xpath("product")[0]
variant.product_name = str(product_node.attrib["name"])
variant.product_version = str(product_node.attrib["version"])
variant.product_short = str(product_node.attrib["short"])
contains_optional = False
for child_node in variant_node.xpath("variants/variant"):
child_variant = self.parse_variant_node(child_node)
variant.add_variant(child_variant)
if child_variant.type == "optional":
contains_optional = True
has_optional = self._is_true(variant_node.attrib.get("has_optional", "false"))
if has_optional and not contains_optional:
optional = Variant(id="optional", name="optional", type="optional", arches=variant.arches, groups=[])
variant.add_variant(optional)
for ref in variant_node.xpath("variants/ref/@id"):
child_variant = self.parse_variant_node(self.addons[ref])
variant.add_variant(child_variant)
# XXX: top-level optional
# for ref in variant_node.xpath("variants/ref/@id"):
# variant["variants"].append(copy.deepcopy(addons[ref]))
return variant
def parse(self):
# we allow top-level addon definitions which can be referenced in variants
for variant_node in self.tree.xpath("/variants/variant[@type='addon']"):
variant_id = str(variant_node.attrib["id"])
self.addons[variant_id] = variant_node
for variant_node in self.tree.xpath("/variants/variant[@type='layered-product']"):
variant_id = str(variant_node.attrib["id"])
self.addons[variant_id] = variant_node
result = {}
for variant_node in self.tree.xpath("/variants/variant[@type='variant']"):
variant = self.parse_variant_node(variant_node)
result[variant.id] = variant
for variant_node in self.tree.xpath("/variants/variant[not(@type='variant' or @type='addon' or @type='layered-product')]"):
raise RuntimeError("Invalid variant type at the top-level: %s" % variant_node.attrib["type"])
return result
class Variant(object):
def __init__(self, id, name, type, arches, groups, environments=None):
if not id.isalnum():
raise ValueError("Variant ID must contain only alphanumeric characters: %s" % id)
environments = environments or []
self.id = id
self.name = name
self.type = type
self.arches = sorted(copy.deepcopy(arches))
        self.groups = sorted(copy.deepcopy(groups), key=lambda x: x["name"])
        self.environments = sorted(copy.deepcopy(environments), key=lambda x: x["name"])
self.variants = {}
self.parent = None
def __getitem__(self, name):
return self.variants[name]
def __str__(self):
return self.uid
def __cmp__(self, other):
# variant < addon, layered-product < optional
if self.type == other.type:
return cmp(self.uid, other.uid)
if self.type == "variant":
return -1
if other.type == "variant":
return 1
if self.type == "optional":
return 1
if other.type == "optional":
return -1
return cmp(self.uid, other.uid)
@property
def uid(self):
if self.parent:
return "%s-%s" % (self.parent, self.id)
return self.id
def add_variant(self, variant):
"""Add a variant object to the child variant list."""
if variant.id in self.variants:
return
if self.type != "variant":
raise RuntimeError("Only 'variant' can contain another variants.")
if variant.id == self.id:
# due to os/<variant.id> path -- addon id would conflict with parent variant id
raise RuntimeError("Child variant id must be different than parent variant id: %s" % variant.id)
        # sometimes an addon or layered product can be part of multiple variants with different sets of arches
arches = sorted(set(self.arches).intersection(set(variant.arches)))
if self.arches and not arches:
raise RuntimeError("%s: arch list %s does not intersect with parent arch list: %s" % (variant, variant.arches, self.arches))
variant.arches = arches
self.variants[variant.id] = variant
variant.parent = self
def get_groups(self, arch=None, types=None, recursive=False):
"""Return list of groups, default types is ["self"]"""
types = types or ["self"]
result = copy.deepcopy(self.groups)
for variant in self.get_variants(arch=arch, types=types, recursive=recursive):
if variant == self:
# XXX
continue
for group in variant.get_groups(arch=arch, types=types, recursive=recursive):
if group not in result:
result.append(group)
return result
def get_variants(self, arch=None, types=None, recursive=False):
"""
Return all variants of given arch and types.
Supported variant types:
self - include the top-level ("self") variant as well
addon
variant
optional
"""
types = types or []
result = []
if arch and arch not in self.arches + ["src"]:
return result
if "self" in types:
result.append(self)
for variant in self.variants.values():
if types and variant.type not in types:
continue
if arch and arch not in variant.arches + ["src"]:
continue
result.append(variant)
if recursive:
result.extend(variant.get_variants(types=[i for i in types if i != "self"], recursive=True))
return result
def get_addons(self, arch=None):
"""Return all 'addon' child variants. No recursion."""
return self.get_variants(arch=arch, types=["addon"], recursive=False)
def get_layered_products(self, arch=None):
"""Return all 'layered-product' child variants. No recursion."""
return self.get_variants(arch=arch, types=["layered-product"], recursive=False)
def get_optional(self, arch=None):
"""Return all 'optional' child variants. No recursion."""
return self.get_variants(arch=arch, types=["optional"], recursive=False)
def main(argv):
import optparse
parser = optparse.OptionParser("%prog <variants.xml>")
opts, args = parser.parse_args(argv)
if len(args) != 1:
parser.error("Please provide a <variants.xml> file.")
file_path = args[0]
try:
file_obj = open(file_path, "r")
except Exception as ex:
print(str(ex), file=sys.stderr)
sys.exit(1)
for top_level_variant in list(VariantsXmlParser(file_obj).parse().values()):
for i in top_level_variant.get_variants(types=["self", "variant", "addon", "layered-product", "optional"], recursive=True):
print("ID: %-30s NAME: %-40s TYPE: %-12s UID: %s" % (i.id, i.name, i.type, i))
print(" ARCHES: %s" % ", ".join(sorted(i.arches)))
for group in i.groups:
print(" GROUP: %(name)-40s GLOB: %(glob)-5s DEFAULT: %(default)-5s USERVISIBLE: %(uservisible)-5s" % group)
for env in i.environments:
print(" ENV: %(name)-40s DISPLAY_ORDER: %(display_order)s" % env)
print()
if __name__ == "__main__":
main(sys.argv[1:])

42
share/variants.dtd Normal file

@ -0,0 +1,42 @@
<!ELEMENT variants (ref*,variant*)>
<!ELEMENT variant (product?,arches,groups,environments*,variants*)?>
<!ATTLIST variant
id ID #REQUIRED
name CDATA #REQUIRED
type (variant|addon|optional|layered-product) #REQUIRED
has_optional (true|false) #IMPLIED
>
<!ELEMENT product (#PCDATA)>
<!ATTLIST product
name CDATA #IMPLIED
short CDATA #IMPLIED
version CDATA #IMPLIED
>
<!ELEMENT arches (arch)+>
<!ELEMENT groups (group)+>
<!ELEMENT group (#PCDATA)>
<!ATTLIST group
glob (true|false) #IMPLIED
default (true|false) #IMPLIED
uservisible (true|false) #IMPLIED
>
<!ELEMENT environments (environment)+>
<!ELEMENT environment (#PCDATA)>
<!ATTLIST environment
display_order CDATA #IMPLIED
>
<!ELEMENT arch (#PCDATA)>
<!ELEMENT name (#PCDATA)>
<!ELEMENT ref EMPTY>
<!ATTLIST ref
id IDREF #REQUIRED
>
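
For reference, a minimal variants file that should validate against this DTD, sketched as Python so it can be fed straight to VariantsXmlParser (the import path matches this commit's layout; the DTD itself must be installed, or found via the git-checkout fallback):

from io import BytesIO
from pungi.wrappers.variants import VariantsXmlParser

XML = b"""<variants>
  <variant id="Server" name="Server" type="variant">
    <arches><arch>x86_64</arch></arches>
    <groups><group default="true">core</group></groups>
  </variant>
</variants>"""

variants = VariantsXmlParser(BytesIO(XML)).parse()
print(variants["Server"].arches)  # ['x86_64']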