Initial code merge for Pungi 4.0.

Daniel Mach 2015-02-10 08:19:34 -05:00
parent f5c6d44000
commit f116d9384f
57 changed files with 8759 additions and 10 deletions


@@ -1,9 +1,6 @@
include Authors
include Changelog
include AUTHORS
include COPYING
include GPL
include TESTING
include ToDo
include pungi.spec
include share/*
include doc/*
recursive-include share/*
recursive-include doc/*

75
TODO Normal file

@@ -0,0 +1,75 @@
Random thoughts on what needs to be done before Pungi 4.0 is completed.
Define building blocks and their metadata
=========================================
* rpms in yum repos
* comps
* kickstart trees
* isos
* bootable images
* readme files
* license(s)
Compose structure
=================
* topdir
* work, logs, etc.
* compose
* $variant
* $arch
* $content_type (rpms, isos, kickstart trees, etc.)
* actual content
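Example layout (variant/arch names below are hypothetical):
* $topdir/work/, $topdir/logs/, ...
* $topdir/compose/Server/x86_64/rpms/ -> actual content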
Split Pungi into smaller well-defined tools
===========================================
* process initial packages
* comps
* json mapping
* ???
* grab initial package set
* yum repos
* koji instance (basically what mash does today)
* resolve deps (gather)
* self-hosting
* fulltree
* multilib
* langpacks
* create repos
* create install images
* lorax
* buildinstall
* create isos
* isos
* bootable
* hybrid
* implant md5sum
* jigdo
* checksums
* run tests
* just quick sanity tests
* notification
* email
* messagebus
Unsorted
========
* run any tasks in koji or local host
* support for non-rpm content? (java artifacts, etc.)
* docs!
* unit tests!
* use productmd for metadata: https://github.com/release-engineering/productmd/
* use next-gen tools: createrepo_c, mergerepo_c, dnf, hawkey, libcomps

340
bin/pungi Executable file

@@ -0,0 +1,340 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import optparse
import logging
import locale
import datetime
import getpass
import socket
import json
import pipes
here = sys.path[0]
if here != '/usr/bin':
# Git checkout
sys.path[0] = os.path.dirname(here)
from pungi import __version__
# force C locales
locale.setlocale(locale.LC_ALL, "C")
COMPOSE = None
def main():
global COMPOSE
parser = optparse.OptionParser()
parser.add_option(
"--target-dir",
metavar="PATH",
help="a compose is created under this directory",
)
parser.add_option(
"--label",
help="specify compose label (example: Snapshot-1.0); required for production composes"
)
parser.add_option(
"--no-label",
action="store_true",
default=False,
help="make a production compose without label"
)
parser.add_option(
"--supported",
action="store_true",
default=False,
help="set supported flag on media (automatically on for 'RC-x.y' labels)"
)
parser.add_option(
"--old-composes",
metavar="PATH",
dest="old_composes",
default=[],
action="append",
help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
)
parser.add_option(
"--compose-dir",
metavar="PATH",
help="reuse an existing compose directory (DANGEROUS!)",
)
parser.add_option(
"--debug-mode",
action="store_true",
default=False,
help="run pungi in DEBUG mode (DANGEROUS!)",
)
parser.add_option(
"--config",
help="Config file"
)
parser.add_option(
"--skip-phase",
metavar="PHASE",
action="append",
default=[],
help="skip a compose phase",
)
parser.add_option(
"--just-phase",
metavar="PHASE",
action="append",
default=[],
help="run only a specified compose phase",
)
parser.add_option(
"--nightly",
action="store_const",
const="nightly",
dest="compose_type",
help="make a nightly compose",
)
parser.add_option(
"--test",
action="store_const",
const="test",
dest="compose_type",
help="make a test compose",
)
parser.add_option(
"--koji-event",
metavar="ID",
type="int",
help="specify a koji event for populating package set",
)
parser.add_option(
"--version",
action="store_true",
help="output version information and exit",
)
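# A typical invocation, with hypothetical paths (all flags used here are
# defined above):
#   pungi --config /etc/pungi/fedora.conf --target-dir /mnt/compose --test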
opts, args = parser.parse_args()
if opts.version:
print("pungi %s" % __version__)
sys.exit(0)
if opts.target_dir and opts.compose_dir:
parser.error("cannot specify --target-dir and --compose-dir at once")
if not opts.target_dir and not opts.compose_dir:
parser.error("please specify a target directory")
if opts.target_dir and not opts.compose_dir:
opts.target_dir = os.path.abspath(opts.target_dir)
if not os.path.isdir(opts.target_dir):
parser.error("The target directory does not exist or is not a directory: %s" % opts.target_dir)
else:
opts.compose_dir = os.path.abspath(opts.compose_dir)
if not os.path.isdir(opts.compose_dir):
parser.error("The compose directory does not exist or is not a directory: %s" % opts.compose_dir)
compose_type = opts.compose_type or "production"
if compose_type == "production" and not opts.label and not opts.no_label:
parser.error("must specify label for a production compose")
if not opts.config:
parser.error("please specify a config")
opts.config = os.path.abspath(opts.config)
# check if all requirements are met
import pungi.checks
if not pungi.checks.check():
sys.exit(1)
import kobo.conf
import kobo.log
import productmd.composeinfo.compose
if opts.label:
try:
productmd.composeinfo.compose.verify_label(opts.label)
except ValueError as ex:
parser.error(str(ex))
from pungi.compose import Compose
logger = logging.Logger("Pungi")
kobo.log.add_stderr_logger(logger)
conf = kobo.conf.PyConfigParser()
conf.load_from_file(opts.config)
if opts.target_dir:
compose_dir = Compose.get_compose_dir(opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label)
else:
compose_dir = opts.compose_dir
compose = Compose(conf, topdir=compose_dir, debug=opts.debug_mode, skip_phases=opts.skip_phase, just_phases=opts.just_phase,
old_composes=opts.old_composes, koji_event=opts.koji_event, supported=opts.supported, logger=logger)
kobo.log.add_file_logger(logger, compose.paths.log.log_file("global", "pungi.log"))
COMPOSE = compose
run_compose(compose)
def run_compose(compose):
import pungi.phases
import pungi.metadata
compose.write_status("STARTED")
compose.log_info("Host: %s" % socket.gethostname())
compose.log_info("User name: %s" % getpass.getuser())
compose.log_info("Working directory: %s" % os.getcwd())
compose.log_info("Command line: %s" % " ".join([pipes.quote(arg) for arg in sys.argv]))
compose.log_info("Compose top directory: %s" % compose.topdir)
compose.read_variants()
# dump the config file
date_str = datetime.datetime.strftime(datetime.datetime.now(), "%F_%X").replace(":", "-")
config_dump = compose.paths.log.log_file("global", "config-dump_%s" % date_str)
open(config_dump, "w").write(json.dumps(compose.conf, sort_keys=True, indent=4))
# initialize all phases
init_phase = pungi.phases.InitPhase(compose)
pkgset_phase = pungi.phases.PkgsetPhase(compose)
createrepo_phase = pungi.phases.CreaterepoPhase(compose)
buildinstall_phase = pungi.phases.BuildinstallPhase(compose)
productimg_phase = pungi.phases.ProductimgPhase(compose, pkgset_phase)
gather_phase = pungi.phases.GatherPhase(compose, pkgset_phase)
extrafiles_phase = pungi.phases.ExtraFilesPhase(compose, pkgset_phase)
createiso_phase = pungi.phases.CreateisoPhase(compose)
liveimages_phase = pungi.phases.LiveImagesPhase(compose)
test_phase = pungi.phases.TestPhase(compose)
# check if all config options are set
errors = []
for phase in (init_phase, pkgset_phase, buildinstall_phase, productimg_phase, gather_phase, createiso_phase, test_phase):
if phase.skip():
continue
try:
phase.validate()
except ValueError as ex:
for i in str(ex).splitlines():
errors.append("%s: %s" % (phase.name.upper(), i))
if errors:
for i in errors:
compose.log_error(i)
print(i)
sys.exit(1)
# INIT phase
init_phase.start()
init_phase.stop()
# PKGSET phase
pkgset_phase.start()
pkgset_phase.stop()
# BUILDINSTALL phase - start
buildinstall_phase.start()
# GATHER phase
gather_phase.start()
gather_phase.stop()
# EXTRA_FILES phase
extrafiles_phase.start()
extrafiles_phase.stop()
# CREATEREPO phase
createrepo_phase.start()
createrepo_phase.stop()
# BUILDINSTALL phase
# must finish before PRODUCTIMG
# must finish before CREATEISO
buildinstall_phase.stop()
if not buildinstall_phase.skip():
buildinstall_phase.copy_files()
# PRODUCTIMG phase
productimg_phase.start()
productimg_phase.stop()
# write treeinfo before ISOs are created
for variant in compose.get_variants():
for arch in variant.arches + ["src"]:
pungi.metadata.write_tree_info(compose, arch, variant)
# write .discinfo and media.repo before ISOs are created
for variant in compose.get_variants(recursive=True):
if variant.type == "addon":
continue
for arch in variant.arches + ["src"]:
timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
pungi.metadata.write_media_repo(compose, arch, variant, timestamp)
# CREATEISO and LIVEIMAGES phases
createiso_phase.start()
liveimages_phase.start()
createiso_phase.stop()
liveimages_phase.stop()
# merge checksum files
for variant in compose.get_variants(types=["variant", "layered-product"]):
for arch in variant.arches + ["src"]:
iso_dir = compose.paths.compose.iso_dir(arch, variant, create_dir=False)
if not iso_dir or not os.path.exists(iso_dir):
continue
for checksum_type in ("md5", "sha1", "sha256"):
checksum_upper = "%sSUM" % checksum_type.upper()
checksums = sorted([i for i in os.listdir(iso_dir) if i.endswith(".%s" % checksum_upper)])
fo = open(os.path.join(iso_dir, checksum_upper), "w")
for i in checksums:
data = open(os.path.join(iso_dir, i), "r").read()
fo.write(data)
pungi.metadata.write_compose_info(compose)
compose.im.dump(compose.paths.compose.metadata("images.json"))
# TEST phase
test_phase.start()
test_phase.stop()
# create a latest symlink
compose_dir = os.path.basename(compose.topdir)
symlink_name = "latest-%s-%s" % (compose.conf["product_short"], ".".join(compose.conf["product_version"].split(".")[:-1]))
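# e.g. with hypothetical values product_short="Fedora" and product_version="23.0",
# symlink_name becomes "latest-Fedora-23" (a layered product appends its base
# product below)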
if compose.conf["product_is_layered"]:
symlink_name += "-%s-%s" % (compose.conf["base_product_short"], compose.conf["base_product_version"])
symlink = os.path.join(compose.topdir, "..", symlink_name)
try:
os.unlink(symlink)
except OSError as ex:
if ex.errno != 2:
raise
try:
os.symlink(compose_dir, symlink)
except Exception as ex:
print("ERROR: couldn't create latest symlink: %s" % ex)
compose.log_info("Compose finished: %s" % compose.topdir)
compose.write_status("FINISHED")
if __name__ == "__main__":
try:
main()
except (Exception, KeyboardInterrupt) as ex:
if COMPOSE:
tb_path = COMPOSE.paths.log.log_file("global", "traceback")
COMPOSE.log_error("Exception: %s" % ex)
COMPOSE.log_error("Extended traceback in: %s" % tb_path)
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
COMPOSE.write_status("DOOMED")
import kobo.tback
open(tb_path, "w").write(kobo.tback.Traceback().get_traceback())
else:
print("Exception: %s" % ex)
sys.stdout.flush()
sys.stderr.flush()
raise


@@ -45,7 +45,7 @@ def get_valid_multilib_arches(tree_arch):
multilib_arch = get_multilib_arch(yum_arch)
if not multilib_arch:
return []
return [ i for i in rpmUtils.arch.getArchList(multilib_arch) if i not in ("noarch", "src") ]
return [i for i in rpmUtils.arch.getArchList(multilib_arch) if i not in ("noarch", "src")]
def get_valid_arches(tree_arch, multilib=True, add_noarch=True, add_src=False):

123
pungi/checks.py Normal file

@@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os.path
tools = [
("isomd5sum", "/usr/bin/implantisomd5"),
("isomd5sum", "/usr/bin/checkisomd5"),
("jigdo", "/usr/bin/jigdo-lite"),
("genisoimage", "/usr/bin/genisoimage"),
("gettext", "/usr/bin/msgfmt"),
("syslinux", "/usr/bin/isohybrid"),
("yum-utils", "/usr/bin/createrepo"),
("yum-utils", "/usr/bin/mergerepo"),
("yum-utils", "/usr/bin/repoquery"),
("git", "/usr/bin/git"),
("cvs", "/usr/bin/cvs"),
("gettext", "/usr/bin/msgfmt"),
]
imports = [
("kobo", "kobo"),
("kobo-rpmlib", "kobo.rpmlib"),
("python-lxml", "lxml"),
("koji", "koji"),
("productmd", "productmd"),
]
def check():
fail = False
# Check python modules
for package, module in imports:
try:
__import__(module)
except ImportError:
print("Module '%s' doesn't exist. Install package '%s'." % (module, package))
fail = True
# Check tools
for package, path in tools:
if not os.path.exists(path):
print("Program '%s' doesn't exist. Install package '%s'." % (path, package))
fail = True
return not fail
def validate_options(conf, valid_options):
errors = []
for i in valid_options:
name = i["name"]
value = conf.get(name)
if i.get("deprecated", False):
if name in conf:
errors.append("Deprecated config option: %s; %s" % (name, i["comment"]))
continue
if name not in conf:
if not i.get("optional", False):
errors.append("Config option not set: %s" % name)
continue
# verify type
if "expected_types" in i:
etypes = i["expected_types"]
if not isinstance(etypes, list) and not isinstance(etypes, tuple):
raise TypeError("The 'expected_types' value must be wrapped in a list: %s" % i)
found = False
for etype in etypes:
if isinstance(value, etype):
found = True
break
if not found:
errors.append("Config option '%s' has invalid type: %s. Expected: %s." % (name, str(type(value)), etypes))
continue
# verify value
if "expected_values" in i:
evalues = i["expected_values"]
if not isinstance(evalues, list) and not isinstance(evalues, tuple):
raise TypeError("The 'expected_values' value must be wrapped in a list: %s" % i)
found = False
for evalue in evalues:
if value == evalue:
found = True
break
if not found:
errors.append("Config option '%s' has invalid value: %s. Expected: %s." % (name, value, evalues))
continue
if "requires" in i:
for func, requires in i["requires"]:
if func(value):
for req in requires:
if req not in conf:
errors.append("Config option %s=%s requires %s which is not set" % (name, value, req))
if "conflicts" in i:
for func, conflicts in i["conflicts"]:
if func(value):
for con in conflicts:
if con in conf:
errors.append("Config option %s=%s conflicts with option %s" % (name, value, con))
return errors
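# A sketch of one `valid_options` entry in the shape validate_options() expects;
# this particular entry is illustrative, not taken from a real Pungi config schema:
#
#   {
#       "name": "product_is_layered",
#       "expected_types": [bool],
#       "optional": True,
#       "requires": [
#           (lambda value: bool(value), ["base_product_name", "base_product_short"]),
#       ],
#   }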

241
pungi/compose.py Normal file

@@ -0,0 +1,241 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__all__ = (
"Compose",
)
import errno
import os
import time
import tempfile
import shutil
import kobo.log
from productmd import ComposeInfo, ImageManifest
from pypungi.wrappers.variants import VariantsXmlParser
from pypungi.paths import Paths
from pypungi.wrappers.scm import get_file_from_scm
from pypungi.util import makedirs
from pypungi.metadata import compose_to_composeinfo
def get_compose_dir(topdir, conf, compose_type="production", compose_date=None, compose_respin=None, compose_label=None, already_exists_callbacks=None):
topdir = os.path.abspath(topdir)
already_exists_callbacks = already_exists_callbacks or []
# create an incomplete ComposeInfo to generate compose ID
ci = ComposeInfo()
ci.product.name = conf["product_name"]
ci.product.short = conf["product_short"]
ci.product.version = conf["product_version"]
ci.product.is_layered = bool(conf.get("product_is_layered", False))
if ci.product.is_layered:
ci.base_product.name = conf["base_product_name"]
ci.base_product.short = conf["base_product_short"]
ci.base_product.version = conf["base_product_version"]
ci.compose.label = compose_label
ci.compose.type = compose_type
ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
ci.compose.respin = compose_respin or 0
# HACK - add topdir for callbacks
ci.topdir = topdir
while 1:
ci.compose.id = ci.create_compose_id()
compose_dir = os.path.join(topdir, ci.compose.id)
exists = False
# TODO: callbacks to determine if a composeid was already used
# for callback in already_exists_callbacks:
# if callback(data):
# exists = True
# break
# already_exists_callbacks fallback: does target compose_dir exist?
if not exists:
try:
os.makedirs(compose_dir)
except OSError as ex:
if ex.errno == errno.EEXIST:
exists = True
else:
raise
if exists:
ci.compose.respin += 1
continue
break
open(os.path.join(compose_dir, "COMPOSE_ID"), "w").write(ci.compose.id)
work_dir = os.path.join(compose_dir, "work", "global")
makedirs(work_dir)
ci.dump(os.path.join(work_dir, "composeinfo-base.json"))
return compose_dir
class Compose(kobo.log.LoggingBase):
def __init__(self, conf, topdir, debug=False, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None):
kobo.log.LoggingBase.__init__(self, logger)
# TODO: check if minimal conf values are set
self.conf = conf
self.variants = {}
self.topdir = os.path.abspath(topdir)
self.skip_phases = skip_phases or []
self.just_phases = just_phases or []
self.old_composes = old_composes or []
self.koji_event = koji_event
# intentionally upper-case (visible in the code)
self.DEBUG = debug
# path definitions
self.paths = Paths(self)
# to provide compose_id, compose_date and compose_respin
self.ci_base = ComposeInfo()
self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))
self.supported = supported
if self.compose_label and self.compose_label.split("-")[0] == "RC":
self.log_info("Automatically setting 'supported' flag for a Release Candidate (%s) compose." % self.compose_label)
self.supported = True
self.im = ImageManifest()
if self.DEBUG:
try:
self.im.load(self.paths.compose.metadata("images.json"))
except RuntimeError:
pass
self.im.compose.id = self.compose_id
self.im.compose.type = self.compose_type
self.im.compose.date = self.compose_date
self.im.compose.respin = self.compose_respin
self.im.metadata_path = self.paths.compose.metadata()
get_compose_dir = staticmethod(get_compose_dir)
def __getitem__(self, name):
return self.variants[name]
@property
def compose_id(self):
return self.ci_base.compose.id
@property
def compose_date(self):
return self.ci_base.compose.date
@property
def compose_respin(self):
return self.ci_base.compose.respin
@property
def compose_type(self):
return self.ci_base.compose.type
@property
def compose_type_suffix(self):
return self.ci_base.compose.type_suffix
@property
def compose_label(self):
return self.ci_base.compose.label
@property
def has_comps(self):
return bool(self.conf.get("comps_file", False))
@property
def config_dir(self):
return os.path.dirname(self.conf._open_file or "")
def read_variants(self):
# TODO: move to phases/init ?
variants_file = self.paths.work.variants_file(arch="global")
msg = "Writing variants file: %s" % variants_file
if self.DEBUG and os.path.isfile(variants_file):
self.log_warning("[SKIP ] %s" % msg)
else:
scm_dict = self.conf["variants_file"]
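# variants_file may be a plain path string or an SCM dict; an illustrative
# local-file form (only the "scm" and "file" keys are used in the branches below):
#   {"scm": "file", "file": "variants-fedora.xml"}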
if isinstance(scm_dict, dict):
file_name = os.path.basename(scm_dict["file"])
if scm_dict["scm"] == "file":
scm_dict["file"] = os.path.join(self.config_dir, os.path.basename(scm_dict["file"]))
else:
file_name = os.path.basename(scm_dict)
scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))
self.log_debug(msg)
tmp_dir = tempfile.mkdtemp(prefix="variants_file_")
get_file_from_scm(scm_dict, tmp_dir, logger=self._logger)
shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)
shutil.rmtree(tmp_dir)
file_obj = open(variants_file, "r")
tree_arches = self.conf.get("tree_arches", None)
self.variants = VariantsXmlParser(file_obj, tree_arches).parse()
# populate ci_base with variants - needed for layered-products (compose_id)
self.ci_base = compose_to_composeinfo(self)
def get_variants(self, types=None, arch=None, recursive=False):
result = []
types = types or ["variant", "optional", "addon", "layered-product"]
for i in self.variants.values():
if i.type in types:
if arch and arch not in i.arches:
continue
result.append(i)
result.extend(i.get_variants(types=types, arch=arch, recursive=recursive))
return sorted(set(result))
def get_arches(self):
result = set()
tree_arches = self.conf.get("tree_arches", None)
for variant in self.get_variants():
for arch in variant.arches:
if tree_arches:
if arch in tree_arches:
result.add(arch)
else:
result.add(arch)
return sorted(result)
def write_status(self, stat_msg):
if stat_msg not in ("STARTED", "FINISHED", "DOOMED"):
self.log_warning("Writing nonstandard compose status: %s" % stat_msg)
old_status = self.get_status()
if stat_msg == old_status:
return
if old_status == "FINISHED":
msg = "Could not modify a FINISHED compose: %s" % self.topdir
self.log_error(msg)
raise RuntimeError(msg)
open(os.path.join(self.topdir, "STATUS"), "w").write(stat_msg + "\n")
def get_status(self):
path = os.path.join(self.topdir, "STATUS")
if not os.path.isfile(path):
return
return open(path, "r").read().strip()


@@ -0,0 +1,97 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
The .discinfo file contains metadata about media.
The following fields are part of the .discinfo file,
one record per line:
- timestamp
- release
- architecture
- disc number (optional)
"""
__all__ = (
"read_discinfo",
"write_discinfo",
"write_media_repo",
)
import time
def write_discinfo(file_path, description, arch, disc_numbers=None, timestamp=None):
"""
Write a .discinfo file.
"""
disc_numbers = disc_numbers or ["ALL"]
if not isinstance(disc_numbers, list):
raise TypeError("Invalid type: disc_numbers type is %s; expected: <list>" % type(disc_numbers))
if not timestamp:
timestamp = "%f" % time.time()
f = open(file_path, "w")
f.write("%s\n" % timestamp)
f.write("%s\n" % description)
f.write("%s\n" % arch)
if disc_numbers:
f.write("%s\n" % ",".join([str(i) for i in disc_numbers]))
f.close()
return timestamp
def read_discinfo(file_path):
result = {}
f = open(file_path, "r")
result["timestamp"] = f.readline().strip()
result["description"] = f.readline().strip()
result["arch"] = f.readline().strip()
disc_numbers = f.readline().strip()
if not disc_numbers:
result["disc_numbers"] = None
elif disc_numbers == "ALL":
result["disc_numbers"] = ["ALL"]
else:
result["disc_numbers"] = [int(i) for i in disc_numbers.split(",")]
return result
def write_media_repo(file_path, description, timestamp=None):
"""
Write a media.repo file for the disc, to be used on the installed system.
PackageKit uses this.
"""
if not timestamp:
timestamp = "%f" % time.time()
data = [
"[InstallMedia]",
"name=%s" % description,
"mediaid=%s" % timestamp,
"metadata_expire=-1",
"gpgcheck=0",
"cost=500",
"",
]
repo_file = open(file_path, "w")
repo_file.write("\n".join(data))
repo_file.close()
return timestamp


@@ -1,4 +1,6 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
@@ -12,12 +14,14 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import time
import yum
from ConfigParser import SafeConfigParser
class Config(SafeConfigParser):
def __init__(self):
SafeConfigParser.__init__(self)


@@ -1,4 +1,4 @@
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify


@@ -1,6 +1,20 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Pungi adds several new sections to kickstarts.

315
pungi/linker.py Normal file

@@ -0,0 +1,315 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import errno
import os
import shutil
import kobo.log
from kobo.shortcuts import relative_path
from kobo.threads import WorkerThread, ThreadPool
from pypungi.util import makedirs
class LinkerPool(ThreadPool):
def __init__(self, link_type="hardlink-or-copy", logger=None):
ThreadPool.__init__(self, logger)
self.link_type = link_type
self.linker = Linker()
class LinkerThread(WorkerThread):
def process(self, item, num):
src, dst = item
if (num % 100 == 0) or (num == self.pool.queue_total):
self.pool.log_debug("Linked %s out of %s packages" % (num, self.pool.queue_total))
self.pool.linker.link(src, dst, link_type=self.pool.link_type)
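# A minimal usage sketch, assuming the usual kobo.threads.ThreadPool interface
# (add / queue_put / start / stop); paths and worker count are hypothetical:
#
#   pool = LinkerPool(link_type="hardlink-or-copy", logger=logger)
#   for _ in range(4):
#       pool.add(LinkerThread(pool))
#   pool.queue_put(("/mnt/koji/foo-1.0-1.noarch.rpm", "/compose/Packages/foo-1.0-1.noarch.rpm"))
#   pool.start()
#   pool.stop()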
class Linker(kobo.log.LoggingBase):
def __init__(self, ignore_existing=False, always_copy=None, test=False, logger=None):
kobo.log.LoggingBase.__init__(self, logger=logger)
self.ignore_existing = ignore_existing
self.always_copy = always_copy or []
self.test = test
self._precache = {}
self._inode_map = {}
def _is_same_type(self, path1, path2):
if not os.path.islink(path1) == os.path.islink(path2):
return False
if not os.path.isdir(path1) == os.path.isdir(path2):
return False
if not os.path.isfile(path1) == os.path.isfile(path2):
return False
return True
def _is_same(self, path1, path2):
if self.ignore_existing:
return True
if path1 == path2:
return True
if os.path.islink(path2) and not os.path.exists(path2):
return True
if os.path.getsize(path1) != os.path.getsize(path2):
return False
if int(os.path.getmtime(path1)) != int(os.path.getmtime(path2)):
return False
return True
def symlink(self, src, dst, relative=True):
if src == dst:
return
old_src = src
if relative:
src = relative_path(src, dst)
msg = "Symlinking %s -> %s" % (dst, src)
if self.test:
self.log_info("TEST: %s" % msg)
return
self.log_info(msg)
try:
os.symlink(src, dst)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
if os.path.islink(dst) and self._is_same(old_src, dst):
if os.readlink(dst) != src:
raise
self.log_debug("The same file already exists, skipping symlink %s -> %s" % (dst, src))
else:
raise
def hardlink_on_dest(self, src, dst):
if src == dst:
return
if os.path.exists(src):
st = os.stat(src)
file_name = os.path.basename(src)
precache_key = (file_name, int(st.st_mtime), st.st_size)
if precache_key in self._precache:
self.log_warning("HIT %s" % str(precache_key))
cached_path = self._precache[precache_key]["path"]
self.hardlink(cached_path, dst)
return True
return False
def hardlink(self, src, dst):
if src == dst:
return
msg = "Hardlinking %s to %s" % (src, dst)
if self.test:
self.log_info("TEST: %s" % msg)
return
self.log_info(msg)
try:
os.link(src, dst)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
if self._is_same(src, dst):
if not self._is_same_type(src, dst):
self.log_error("File %s already exists but has different type than %s" % (dst, src))
raise
self.log_debug("The same file already exists, skipping hardlink %s to %s" % (src, dst))
else:
raise
def copy(self, src, dst):
if src == dst:
return True
if os.path.islink(src):
msg = "Copying symlink %s to %s" % (src, dst)
else:
msg = "Copying file %s to %s" % (src, dst)
if self.test:
self.log_info("TEST: %s" % msg)
return
self.log_info(msg)
if os.path.exists(dst):
if self._is_same(src, dst):
if not self._is_same_type(src, dst):
self.log_error("File %s already exists but has different type than %s" % (dst, src))
raise OSError(errno.EEXIST, "File exists")
self.log_debug("The same file already exists, skipping copy %s to %s" % (src, dst))
return
else:
raise OSError(errno.EEXIST, "File exists")
if os.path.islink(src):
if not os.path.islink(dst):
os.symlink(os.readlink(src), dst)
return
return
src_stat = os.stat(src)
src_key = (src_stat.st_dev, src_stat.st_ino)
if src_key in self._inode_map:
# (st_dev, st_ino) found in the mapping
self.log_debug("Harlink detected, hardlinking in destination %s to %s" % (self._inode_map[src_key], dst))
os.link(self._inode_map[src_key], dst)
return
# BEWARE: shutil.copy2 automatically *overwrites* existing files
shutil.copy2(src, dst)
self._inode_map[src_key] = dst
if not self._is_same(src, dst):
self.log_error("File %s doesn't match the copied file %s" % (src, dst))
# XXX:
raise OSError(errno.EEXIST, "File exists")
def _put_into_cache(self, path):
def get_stats(item):
return [item[i] for i in ("st_dev", "st_ino", "st_mtime", "st_size")]
filename = os.path.basename(path)
st = os.stat(path)
item = {
"st_dev": st.st_dev,
"st_ino": st.st_ino,
"st_mtime": int(st.st_mtime),
"st_size": st.st_size,
"path": path,
}
precache_key = (filename, int(st.st_mtime), st.st_size)
if precache_key in self._precache:
if get_stats(self._precache[precache_key]) != get_stats(item):
# Files have the same mtime and size, but a different
# device and/or inode.
self.log_debug("Caching failed, files are different: %s, %s"
% (path, self._precache[precache_key]["path"]))
return False
self._precache[precache_key] = item
return True
def scan(self, path):
"""Recursively scan a directory and populate the cache."""
msg = "Scanning directory: %s" % path
self.log_debug("[BEGIN] %s" % msg)
for dirpath, _, filenames in os.walk(path):
for filename in filenames:
path = os.path.join(dirpath, filename)
self._put_into_cache(path)
self.log_debug("[DONE ] %s" % msg)
def _link_file(self, src, dst, link_type):
if link_type == "hardlink":
if not self.hardlink_on_dest(src, dst):
self.hardlink(src, dst)
elif link_type == "copy":
self.copy(src, dst)
elif link_type in ("symlink", "abspath-symlink"):
if os.path.islink(src):
self.copy(src, dst)
else:
relative = link_type != "abspath-symlink"
self.symlink(src, dst, relative)
elif link_type == "hardlink-or-copy":
if not self.hardlink_on_dest(src, dst):
src_stat = os.stat(src)
dst_stat = os.stat(os.path.dirname(dst))
if src_stat.st_dev == dst_stat.st_dev:
self.hardlink(src, dst)
else:
self.copy(src, dst)
else:
raise ValueError("Unknown link_type: %s" % link_type)
def link(self, src, dst, link_type="hardlink-or-copy", scan=True):
"""Link directories recursively."""
if os.path.isfile(src) or os.path.islink(src):
self._link_file(src, dst, link_type)
return
if os.path.isfile(dst):
raise OSError(errno.EEXIST, "File exists")
if not self.test:
if not os.path.exists(dst):
makedirs(dst)
shutil.copystat(src, dst)
for i in os.listdir(src):
src_path = os.path.join(src, i)
dst_path = os.path.join(dst, i)
self.link(src_path, dst_path, link_type)
return
if scan:
self.scan(dst)
self.log_debug("Start linking")
src = os.path.abspath(src)
for dirpath, dirnames, filenames in os.walk(src):
rel_path = dirpath[len(src):].lstrip("/")
dst_path = os.path.join(dst, rel_path)
# Dir check and creation
if not os.path.isdir(dst_path):
if os.path.exists(dst_path):
# A file with the same name already exists at the
# destination, but it is not a directory.
self.log_error("Cannot create directory %s" % dst_path)
dirnames = [] # noqa
continue
os.mkdir(dst_path)
# Process all files in directory
for filename in filenames:
path = os.path.join(dirpath, filename)
st = os.stat(path)
# Check cache
# The same file already exists in a destination dir =>
# create the new file as a hardlink to the cached one.
precache_key = (filename, int(st.st_mtime), st.st_size)
full_dst_path = os.path.join(dst_path, filename)
if precache_key in self._precache:
# Cache hit
cached_path = self._precache[precache_key]["path"]
self.log_debug("Cache HIT for %s [%s]" % (path, cached_path))
if cached_path != full_dst_path:
self.hardlink(cached_path, full_dst_path)
else:
self.log_debug("Files are same, skip hardlinking")
continue
# Cache miss
# Copy the new file and put it to the cache.
try:
self.copy(path, full_dst_path)
except Exception as ex:
print(ex)
print(path, open(path, "r").read())
print(full_dst_path, open(full_dst_path, "r").read())
print(os.stat(path))
print(os.stat(full_dst_path))
os.utime(full_dst_path, (st.st_atime, int(st.st_mtime)))
self._put_into_cache(full_dst_path)
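# A minimal direct-usage sketch (paths are hypothetical):
#
#   linker = Linker(logger=logger)
#   linker.link("/mnt/source/tree", "/mnt/compose/tree", link_type="hardlink-or-copy")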

133
pungi/media_split.py Normal file

@@ -0,0 +1,133 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
SIZE_UNITS = {
"b": 1,
"k": 1024,
"M": 1024 ** 2,
"G": 1024 ** 3,
}
def convert_media_size(size):
if isinstance(size, str):
if size[-1] in SIZE_UNITS:
num = int(size[:-1])
units = size[-1]
else:
num = int(size)
units = "b"
result = num * SIZE_UNITS[units]
else:
result = int(size)
if result <= 0:
raise ValueError("Media size must be a positive number: %s" % size)
return result
def convert_file_size(size, block_size=2048):
"""round file size to block"""
blocks = int(size / block_size)
if size % block_size:
blocks += 1
return blocks * block_size
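# For example: convert_media_size("4G") == 4 * 1024 ** 3, and with the default
# 2048-byte block size convert_file_size(2049) == 4096 (rounded up to two blocks).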
class MediaSplitter(object):
def __init__(self, media_size):
self.media_size = convert_media_size(media_size)
self.files = [] # to preserve order
self.file_sizes = {}
self.sticky_files = set()
def add_file(self, name, size, sticky=False):
name = os.path.normpath(name)
size = int(size)
old_size = self.file_sizes.get(name, None)
if old_size is None:
self.files.append(name)
self.file_sizes[name] = size
elif old_size != size:
raise ValueError("File size mismatch; file: %s; sizes: %s vs %s" % (name, old_size, size))
elif size > self.media_size:
raise ValueError("File is larger than media size: %s" % name)
if sticky:
self.sticky_files.add(name)
'''
def load(self, file_name):
f = open(file_name, "r")
for line in f:
line = line.strip()
if not line:
continue
name, size = line.split(" ")
self.add_file(name, size)
f.close()
def scan(self, pattern):
for i in glob.glob(pattern):
self.add_file(i, os.path.getsize(i))
def dump(self, file_name):
f = open(file_name, "w")
for name in self.files:
f.write("%s %s\n" % (os.path.basename(name), self.file_sizes[name]))
f.close()
'''
@property
def total_size(self):
return sum(self.file_sizes.values())
@property
def total_size_in_blocks(self):
return sum([convert_file_size(i) for i in list(self.file_sizes.values())])
def split(self, first_disk=0, all_disks=0):
all_files = []
sticky_files = []
sticky_files_size = 0
for name in self.files:
if name in self.sticky_files:
sticky_files.append(name)
sticky_files_size += convert_file_size(self.file_sizes[name])
else:
all_files.append(name)
disks = []
disk = {}
while all_files:
name = all_files.pop(0)
size = convert_file_size(self.file_sizes[name])
if not disks or disk["size"] + size > self.media_size:
disk = {"size": 0, "files": []}
disks.append(disk)
disk["files"].extend(sticky_files)
disk["size"] += sticky_files_size
disk["files"].append(name)
disk["size"] += convert_file_size(self.file_sizes[name])
return disks
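# A minimal usage sketch (file names and sizes are illustrative):
#
#   ms = MediaSplitter("700M")
#   ms.add_file("Packages/foo-1.0-1.noarch.rpm", 10 * 1024 ** 2)
#   ms.add_file("repodata/repomd.xml", 4096, sticky=True)  # copied onto every disk
#   disks = ms.split()  # -> [{"files": [...], "size": ...}, ...]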

306
pungi/metadata.py Normal file

@@ -0,0 +1,306 @@
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
import time
import productmd.composeinfo
import productmd.treeinfo
import productmd.treeinfo.product
from productmd import get_major_version
from kobo.shortcuts import relative_path
from pypungi.compose_metadata.discinfo import write_discinfo as create_discinfo
from pypungi.compose_metadata.discinfo import write_media_repo as create_media_repo
def get_description(compose, variant, arch):
if "product_discinfo_description" in compose.conf:
result = compose.conf["product_discinfo_description"]
elif variant.type == "layered-product":
# we need to make sure the layered product behaves as if it were composed separately
result = "%s %s for %s %s" % (variant.product_name, variant.product_version, compose.conf["product_name"], get_major_version(compose.conf["product_version"]))
else:
result = "%s %s" % (compose.conf["product_name"], compose.conf["product_version"])
if compose.conf.get("is_layered", False):
result += "for %s %s" % (compose.conf["base_product_name"], compose.conf["base_product_version"])
result = result % {"variant_name": variant.name, "arch": arch}
return result
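# For illustration (hypothetical config values): product_name="Fedora" and
# product_version="23" yield "Fedora 23"; any %(variant_name)s / %(arch)s
# placeholders in the configured description are filled in by the mapping above.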
def write_discinfo(compose, arch, variant):
if variant.type == "addon":
return
os_tree = compose.paths.compose.os_tree(arch, variant)
path = os.path.join(os_tree, ".discinfo")
# description = get_volid(compose, arch, variant)
description = get_description(compose, variant, arch)
return create_discinfo(path, description, arch)
def write_media_repo(compose, arch, variant, timestamp=None):
if variant.type == "addon":
return
os_tree = compose.paths.compose.os_tree(arch, variant)
path = os.path.join(os_tree, "media.repo")
# description = get_volid(compose, arch, variant)
description = get_description(compose, variant, arch)
return create_media_repo(path, description, timestamp)
def compose_to_composeinfo(compose):
ci = productmd.composeinfo.ComposeInfo()
# compose
ci.compose.id = compose.compose_id
ci.compose.type = compose.compose_type
ci.compose.date = compose.compose_date
ci.compose.respin = compose.compose_respin
ci.compose.label = compose.compose_label
# product
ci.product.name = compose.conf["product_name"]
ci.product.version = compose.conf["product_version"]
ci.product.short = compose.conf["product_short"]
ci.product.is_layered = compose.conf.get("product_is_layered", False)
# base product
if ci.product.is_layered:
ci.base_product.name = compose.conf["base_product_name"]
ci.base_product.version = compose.conf["base_product_version"]
ci.base_product.short = compose.conf["base_product_short"]
def dump_variant(variant, parent=None):
var = productmd.composeinfo.Variant(ci)
tree_arches = compose.conf.get("tree_arches", None)
if tree_arches and not (set(variant.arches) & set(tree_arches)):
return None