rename binaries
Rename the pungi binary to pungi-koji, since it does its tasks in Koji. Rename pungi-gather to pungi, as it is the standalone old pungi binary: there are scripts that expect pungi to be the old pungi, and the new binary is not yet in use. pungi-koji seems to make sense; open to better ideas.
commit a3158ec144 (parent 320724ed98)
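For context, here is a minimal sketch (not part of this commit) of how a script would invoke the two binaries once the rename lands. The option names are taken from the option parsers visible in the diff below; the config paths and directories are hypothetical.

# Illustration only -- assumes the post-rename binary names from this commit.
import subprocess

# Old standalone pungi (previously bin/pungi-gather): kickstart-driven.
subprocess.check_call([
    "pungi",
    "--config", "/path/to/fedora.ks",   # hypothetical kickstart file
    "--destdir", "/srv/pungi",          # hypothetical destination directory
    "-G",                               # run only the Gather stage
])

# New koji-based tool (previously bin/pungi): compose-config driven.
subprocess.check_call([
    "pungi-koji",
    "--config", "/path/to/compose.conf",  # hypothetical compose config
    "--target-dir", "/srv/composes",      # hypothetical target directory
    "--test",                             # make a test compose
])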
bin/pungi | 629
@@ -1,340 +1,325 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
#!/usr/bin/python -tt
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import optparse
|
||||
import logging
|
||||
import locale
|
||||
import datetime
|
||||
import getpass
|
||||
import socket
|
||||
import json
|
||||
import pipes
|
||||
|
||||
here = sys.path[0]
|
||||
if here != '/usr/bin':
|
||||
# Git checkout
|
||||
sys.path[0] = os.path.dirname(here)
|
||||
|
||||
from pungi import __version__
|
||||
|
||||
|
||||
# force C locales
|
||||
locale.setlocale(locale.LC_ALL, "C")
|
||||
|
||||
|
||||
COMPOSE = None
|
||||
|
||||
import pungi.gather
|
||||
import pungi.config
|
||||
import pungi.ks
|
||||
import subprocess
|
||||
|
||||
def main():
|
||||
global COMPOSE
|
||||
|
||||
parser = optparse.OptionParser()
|
||||
parser.add_option(
|
||||
"--target-dir",
|
||||
metavar="PATH",
|
||||
help="a compose is created under this directory",
|
||||
)
|
||||
parser.add_option(
|
||||
"--label",
|
||||
help="specify compose label (example: Snapshot-1.0); required for production composes"
|
||||
)
|
||||
parser.add_option(
|
||||
"--no-label",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="make a production compose without label"
|
||||
)
|
||||
parser.add_option(
|
||||
"--supported",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="set supported flag on media (automatically on for 'RC-x.y' labels)"
|
||||
)
|
||||
parser.add_option(
|
||||
"--old-composes",
|
||||
metavar="PATH",
|
||||
dest="old_composes",
|
||||
default=[],
|
||||
action="append",
|
||||
help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
|
||||
)
|
||||
parser.add_option(
|
||||
"--compose-dir",
|
||||
metavar="PATH",
|
||||
help="reuse an existing compose directory (DANGEROUS!)",
|
||||
)
|
||||
parser.add_option(
|
||||
"--debug-mode",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="run pungi in DEBUG mode (DANGEROUS!)",
|
||||
)
|
||||
parser.add_option(
|
||||
"--config",
|
||||
help="Config file"
|
||||
)
|
||||
parser.add_option(
|
||||
"--skip-phase",
|
||||
metavar="PHASE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="skip a compose phase",
|
||||
)
|
||||
parser.add_option(
|
||||
"--just-phase",
|
||||
metavar="PHASE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="run only a specified compose phase",
|
||||
)
|
||||
parser.add_option(
|
||||
"--nightly",
|
||||
action="store_const",
|
||||
const="nightly",
|
||||
dest="compose_type",
|
||||
help="make a nightly compose",
|
||||
)
|
||||
parser.add_option(
|
||||
"--test",
|
||||
action="store_const",
|
||||
const="test",
|
||||
dest="compose_type",
|
||||
help="make a test compose",
|
||||
)
|
||||
parser.add_option(
|
||||
"--koji-event",
|
||||
metavar="ID",
|
||||
type="int",
|
||||
help="specify a koji event for populating package set",
|
||||
)
|
||||
parser.add_option(
|
||||
"--version",
|
||||
action="store_true",
|
||||
help="output version information and exit",
|
||||
)
|
||||
config = pungi.config.Config()
|
||||
|
||||
opts, args = parser.parse_args()
|
||||
(opts, args) = get_arguments(config)
|
||||
|
||||
if opts.version:
|
||||
print("pungi %s" % __version__)
|
||||
sys.exit(0)
|
||||
|
||||
if opts.target_dir and opts.compose_dir:
|
||||
parser.error("cannot specify --target-dir and --compose-dir at once")
|
||||
|
||||
if not opts.target_dir and not opts.compose_dir:
|
||||
parser.error("please specify a target directory")
|
||||
|
||||
if opts.target_dir and not opts.compose_dir:
|
||||
opts.target_dir = os.path.abspath(opts.target_dir)
|
||||
if not os.path.isdir(opts.target_dir):
|
||||
parser.error("The target directory does not exist or is not a directory: %s" % opts.target_dir)
|
||||
else:
|
||||
opts.compose_dir = os.path.abspath(opts.compose_dir)
|
||||
if not os.path.isdir(opts.compose_dir):
|
||||
parser.error("The compose directory does not exist or is not a directory: %s" % opts.compose_dir)
|
||||
|
||||
compose_type = opts.compose_type or "production"
|
||||
if compose_type == "production" and not opts.label and not opts.no_label:
|
||||
parser.error("must specify label for a production compose")
|
||||
|
||||
if not opts.config:
|
||||
parser.error("please specify a config")
|
||||
opts.config = os.path.abspath(opts.config)
|
||||
|
||||
# check if all requirements are met
|
||||
import pungi.checks
|
||||
if not pungi.checks.check():
|
||||
sys.exit(1)
|
||||
|
||||
import kobo.conf
|
||||
import kobo.log
|
||||
import productmd.composeinfo.compose
|
||||
|
||||
if opts.label:
|
||||
# You must be this high to ride if you're going to do root tasks
|
||||
if os.geteuid () != 0 and (opts.do_all or opts.do_buildinstall):
|
||||
print >> sys.stderr, "You must run pungi as root"
|
||||
return 1
|
||||
|
||||
if opts.do_all or opts.do_buildinstall:
|
||||
try:
|
||||
productmd.composeinfo.compose.verify_label(opts.label)
|
||||
except ValueError as ex:
|
||||
parser.error(str(ex))
|
||||
selinux = subprocess.Popen('/usr/sbin/getenforce',
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=open('/dev/null', 'w')).communicate()[0].strip('\n')
|
||||
if selinux == 'Enforcing':
|
||||
print >> sys.stdout, "WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."
|
||||
print >> sys.stdout, "Consider running with setenforce 0."
|
||||
except:
|
||||
pass
|
||||
|
||||
from pungi.compose import Compose
|
||||
# Set up the kickstart parser and pass in the kickstart file we were handed
|
||||
ksparser = pungi.ks.get_ksparser(ks_path=opts.config)
|
||||
|
||||
logger = logging.Logger("Pungi")
|
||||
kobo.log.add_stderr_logger(logger)
|
||||
if opts.sourceisos:
|
||||
config.set('pungi', 'arch', 'source')
|
||||
|
||||
conf = kobo.conf.PyConfigParser()
|
||||
conf.load_from_file(opts.config)
|
||||
for part in ksparser.handler.partition.partitions:
|
||||
if part.mountpoint == 'iso':
|
||||
config.set('pungi', 'cdsize', str(part.size))
|
||||
|
||||
config.set('pungi', 'force', str(opts.force))
|
||||
|
||||
if opts.target_dir:
|
||||
compose_dir = Compose.get_compose_dir(opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label)
|
||||
else:
|
||||
compose_dir = opts.compose_dir
|
||||
|
||||
compose = Compose(conf, topdir=compose_dir, debug=opts.debug_mode, skip_phases=opts.skip_phase, just_phases=opts.just_phase,
|
||||
old_composes=opts.old_composes, koji_event=opts.koji_event, supported=opts.supported, logger=logger)
|
||||
kobo.log.add_file_logger(logger, compose.paths.log.log_file("global", "pungi.log"))
|
||||
COMPOSE = compose
|
||||
run_compose(compose)
|
||||
|
||||
|
||||
def run_compose(compose):
|
||||
import pungi.phases
|
||||
import pungi.metadata
|
||||
|
||||
compose.write_status("STARTED")
|
||||
compose.log_info("Host: %s" % socket.gethostname())
|
||||
compose.log_info("User name: %s" % getpass.getuser())
|
||||
compose.log_info("Working directory: %s" % os.getcwd())
|
||||
compose.log_info("Command line: %s" % " ".join([pipes.quote(arg) for arg in sys.argv]))
|
||||
compose.log_info("Compose top directory: %s" % compose.topdir)
|
||||
compose.read_variants()
|
||||
|
||||
# dump the config file
|
||||
date_str = datetime.datetime.strftime(datetime.datetime.now(), "%F_%X").replace(":", "-")
|
||||
config_dump = compose.paths.log.log_file("global", "config-dump_%s" % date_str)
|
||||
open(config_dump, "w").write(json.dumps(compose.conf, sort_keys=True, indent=4))
|
||||
|
||||
# initialize all phases
|
||||
init_phase = pungi.phases.InitPhase(compose)
|
||||
pkgset_phase = pungi.phases.PkgsetPhase(compose)
|
||||
createrepo_phase = pungi.phases.CreaterepoPhase(compose)
|
||||
buildinstall_phase = pungi.phases.BuildinstallPhase(compose)
|
||||
productimg_phase = pungi.phases.ProductimgPhase(compose, pkgset_phase)
|
||||
gather_phase = pungi.phases.GatherPhase(compose, pkgset_phase)
|
||||
extrafiles_phase = pungi.phases.ExtraFilesPhase(compose, pkgset_phase)
|
||||
createiso_phase = pungi.phases.CreateisoPhase(compose)
|
||||
liveimages_phase = pungi.phases.LiveImagesPhase(compose)
|
||||
test_phase = pungi.phases.TestPhase(compose)
|
||||
|
||||
# check if all config options are set
|
||||
errors = []
|
||||
for phase in (init_phase, pkgset_phase, buildinstall_phase, productimg_phase, gather_phase, createiso_phase, test_phase):
|
||||
if phase.skip():
|
||||
continue
|
||||
if config.get('pungi', 'workdirbase') == '/work':
|
||||
config.set('pungi', 'workdirbase', "%s/work" % config.get('pungi', 'destdir'))
|
||||
# Set up our directories
|
||||
if not os.path.exists(config.get('pungi', 'destdir')):
|
||||
try:
|
||||
phase.validate()
|
||||
except ValueError as ex:
|
||||
for i in str(ex).splitlines():
|
||||
errors.append("%s: %s" % (phase.name.upper(), i))
|
||||
if errors:
|
||||
for i in errors:
|
||||
compose.log_error(i)
|
||||
print(i)
|
||||
sys.exit(1)
|
||||
os.makedirs(config.get('pungi', 'destdir'))
|
||||
except OSError, e:
|
||||
print >> sys.stderr, "Error: Cannot create destination dir %s" % config.get('pungi', 'destdir')
|
||||
sys.exit(1)
|
||||
else:
|
||||
print >> sys.stdout, "Warning: Reusing existing destination directory."
|
||||
|
||||
# INIT phase
|
||||
init_phase.start()
|
||||
init_phase.stop()
|
||||
if not os.path.exists(config.get('pungi', 'workdirbase')):
|
||||
try:
|
||||
os.makedirs(config.get('pungi', 'workdirbase'))
|
||||
except OSError, e:
|
||||
print >> sys.stderr, "Error: Cannot create working base dir %s" % config.get('pungi', 'workdirbase')
|
||||
sys.exit(1)
|
||||
else:
|
||||
print >> sys.stdout, "Warning: Reusing existing working base directory."
|
||||
|
||||
# PKGSET phase
|
||||
pkgset_phase.start()
|
||||
pkgset_phase.stop()
|
||||
cachedir = config.get('pungi', 'cachedir')
|
||||
|
||||
# BUILDINSTALL phase - start
|
||||
buildinstall_phase.start()
|
||||
if not os.path.exists(cachedir):
|
||||
try:
|
||||
os.makedirs(cachedir)
|
||||
except OSError, e:
|
||||
print >> sys.stderr, "Error: Cannot create cache dir %s" % cachedir
|
||||
sys.exit(1)
|
||||
|
||||
# GATHER phase
|
||||
gather_phase.start()
|
||||
gather_phase.stop()
|
||||
|
||||
# EXTRA_FILES phase
|
||||
extrafiles_phase.start()
|
||||
extrafiles_phase.stop()
|
||||
|
||||
# CREATEREPO phase
|
||||
createrepo_phase.start()
|
||||
createrepo_phase.stop()
|
||||
|
||||
# BUILDINSTALL phase
|
||||
# must finish before PRODUCTIMG
|
||||
# must finish before CREATEISO
|
||||
buildinstall_phase.stop()
|
||||
if not buildinstall_phase.skip():
|
||||
buildinstall_phase.copy_files()
|
||||
|
||||
# PRODUCTIMG phase
|
||||
productimg_phase.start()
|
||||
productimg_phase.stop()
|
||||
|
||||
# write treeinfo before ISOs are created
|
||||
for variant in compose.get_variants():
|
||||
for arch in variant.arches + ["src"]:
|
||||
pungi.metadata.write_tree_info(compose, arch, variant)
|
||||
|
||||
# write .discinfo and media.repo before ISOs are created
|
||||
for variant in compose.get_variants(recursive=True):
|
||||
if variant.type == "addon":
|
||||
continue
|
||||
for arch in variant.arches + ["src"]:
|
||||
timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
|
||||
pungi.metadata.write_media_repo(compose, arch, variant, timestamp)
|
||||
|
||||
# CREATEISO and LIVEIMAGES phases
|
||||
createiso_phase.start()
|
||||
liveimages_phase.start()
|
||||
|
||||
createiso_phase.stop()
|
||||
liveimages_phase.stop()
|
||||
|
||||
# merge checksum files
|
||||
for variant in compose.get_variants(types=["variant", "layered-product"]):
|
||||
for arch in variant.arches + ["src"]:
|
||||
iso_dir = compose.paths.compose.iso_dir(arch, variant, create_dir=False)
|
||||
if not iso_dir or not os.path.exists(iso_dir):
|
||||
continue
|
||||
for checksum_type in ("md5", "sha1", "sha256"):
|
||||
checksum_upper = "%sSUM" % checksum_type.upper()
|
||||
checksums = sorted([i for i in os.listdir(iso_dir) if i.endswith(".%s" % checksum_upper)])
|
||||
fo = open(os.path.join(iso_dir, checksum_upper), "w")
|
||||
for i in checksums:
|
||||
data = open(os.path.join(iso_dir, i), "r").read()
|
||||
fo.write(data)
|
||||
|
||||
pungi.metadata.write_compose_info(compose)
|
||||
compose.im.dump(compose.paths.compose.metadata("images.json"))
|
||||
|
||||
# TEST phase
|
||||
test_phase.start()
|
||||
test_phase.stop()
|
||||
|
||||
# create a latest symlink
|
||||
compose_dir = os.path.basename(compose.topdir)
|
||||
symlink_name = "latest-%s-%s" % (compose.conf["product_short"], ".".join(compose.conf["product_version"].split(".")[:-1]))
|
||||
if compose.conf["product_is_layered"]:
|
||||
symlink_name += "-%s-%s" % (compose.conf["base_product_short"], compose.conf["base_product_version"])
|
||||
symlink = os.path.join(compose.topdir, "..", symlink_name)
|
||||
|
||||
try:
|
||||
os.unlink(symlink)
|
||||
except OSError as ex:
|
||||
if ex.errno != 2:
|
||||
raise
|
||||
try:
|
||||
os.symlink(compose_dir, symlink)
|
||||
except Exception as ex:
|
||||
print("ERROR: couldn't create latest symlink: %s" % ex)
|
||||
|
||||
compose.log_info("Compose finished: %s" % compose.topdir)
|
||||
compose.write_status("FINISHED")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except (Exception, KeyboardInterrupt) as ex:
|
||||
if COMPOSE:
|
||||
tb_path = COMPOSE.paths.log.log_file("global", "traceback")
|
||||
COMPOSE.log_error("Exception: %s" % ex)
|
||||
COMPOSE.log_error("Extended traceback in: %s" % tb_path)
|
||||
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
|
||||
COMPOSE.write_status("DOOMED")
|
||||
import kobo.tback
|
||||
open(tb_path, "w").write(kobo.tback.Traceback().get_traceback())
|
||||
# Set debuginfo flag
|
||||
if opts.nodebuginfo:
|
||||
config.set('pungi', 'debuginfo', "False")
|
||||
if opts.greedy:
|
||||
config.set('pungi', 'greedy', opts.greedy)
|
||||
else:
|
||||
# XXX: compatibility
|
||||
if opts.nogreedy:
|
||||
config.set('pungi', 'greedy', "none")
|
||||
else:
|
||||
print("Exception: %s" % ex)
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
raise
|
||||
config.set('pungi', 'greedy', "all")
|
||||
config.set('pungi', 'resolve_deps', str(bool(opts.resolve_deps)))
|
||||
if opts.isfinal:
|
||||
config.set('pungi', 'isfinal', "True")
|
||||
if opts.nohash:
|
||||
config.set('pungi', 'nohash', "True")
|
||||
if opts.full_archlist:
|
||||
config.set('pungi', 'full_archlist', "True")
|
||||
if opts.arch:
|
||||
config.set('pungi', 'arch', opts.arch)
|
||||
if opts.multilib:
|
||||
config.set('pungi', 'multilib', " ".join(opts.multilib))
|
||||
if opts.lookaside_repos:
|
||||
config.set('pungi', 'lookaside_repos', " ".join(opts.lookaside_repos))
|
||||
if opts.no_dvd:
|
||||
config.set('pungi', 'no_dvd', "True")
|
||||
if opts.nomacboot:
|
||||
config.set('pungi', 'nomacboot', "True")
|
||||
config.set("pungi", "fulltree", str(bool(opts.fulltree)))
|
||||
config.set("pungi", "selfhosting", str(bool(opts.selfhosting)))
|
||||
config.set("pungi", "nosource", str(bool(opts.nosource)))
|
||||
config.set("pungi", "nodebuginfo", str(bool(opts.nodebuginfo)))
|
||||
|
||||
if opts.lorax_conf:
|
||||
config.set("lorax", "conf_file", opts.lorax_conf)
|
||||
if opts.installpkgs:
|
||||
config.set("lorax", "installpkgs", " ".join(opts.installpkgs))
|
||||
|
||||
# Actually do work.
|
||||
mypungi = pungi.gather.Pungi(config, ksparser)
|
||||
|
||||
with mypungi.yumlock:
|
||||
if not opts.sourceisos:
|
||||
if opts.do_all or opts.do_gather or opts.do_buildinstall:
|
||||
mypungi._inityum() # initialize the yum object for things that need it
|
||||
if opts.do_all or opts.do_gather:
|
||||
mypungi.gather()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_packages():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("RPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadPackages()
|
||||
mypungi.makeCompsFile()
|
||||
if not opts.nodebuginfo:
|
||||
mypungi.getDebuginfoList()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_debuginfo():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadDebuginfo()
|
||||
if not opts.nosource:
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_srpms():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("SRPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadSRPMs()
|
||||
|
||||
print "RPM size: %s MiB" % (mypungi.size_packages() / 1024 ** 2)
|
||||
if not opts.nodebuginfo:
|
||||
print "DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024 ** 2)
|
||||
if not opts.nosource:
|
||||
print "SRPM size: %s MiB" % (mypungi.size_srpms() / 1024 ** 2)
|
||||
|
||||
# Furthermore (but without the yumlock...)
|
||||
if not opts.sourceisos:
|
||||
if opts.do_all or opts.do_createrepo:
|
||||
mypungi.doCreaterepo()
|
||||
|
||||
if opts.do_all or opts.do_buildinstall:
|
||||
if not opts.norelnotes:
|
||||
mypungi.doGetRelnotes()
|
||||
mypungi.doBuildinstall()
|
||||
|
||||
if opts.do_all or opts.do_createiso:
|
||||
mypungi.doCreateIsos()
|
||||
|
||||
# Do things slightly different for src.
|
||||
if opts.sourceisos:
|
||||
# we already have all the content gathered
|
||||
mypungi.topdir = os.path.join(config.get('pungi', 'destdir'),
|
||||
config.get('pungi', 'version'),
|
||||
config.get('pungi', 'variant'),
|
||||
'source', 'SRPMS')
|
||||
mypungi.doCreaterepo(comps=False)
|
||||
if opts.do_all or opts.do_createiso:
|
||||
mypungi.doCreateIsos()
|
||||
|
||||
print "All done!"
|
||||
|
||||
if __name__ == '__main__':
|
||||
from optparse import OptionParser
|
||||
import sys
|
||||
import time
|
||||
|
||||
today = time.strftime('%Y%m%d', time.localtime())
|
||||
|
||||
def get_arguments(config):
|
||||
parser = OptionParser("%prog [--help] [options]", version="%prog 3.13")
|
||||
|
||||
def set_config(option, opt_str, value, parser, config):
|
||||
config.set('pungi', option.dest, value)
|
||||
|
||||
# Pulled in from config file to be cli options as part of pykickstart conversion
|
||||
parser.add_option("--name", dest="family", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the name for your distribution (defaults to "Fedora"), DEPRECATED')
|
||||
parser.add_option("--family", dest="family", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the family name for your distribution (defaults to "Fedora")')
|
||||
parser.add_option("--ver", dest="version", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the version of your distribution (defaults to datestamp)')
|
||||
parser.add_option("--flavor", dest="variant", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the flavor of your distribution spin (optional), DEPRECATED')
|
||||
parser.add_option("--variant", dest="variant", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the variant of your distribution spin (optional)')
|
||||
parser.add_option("--destdir", dest="destdir", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='destination directory (defaults to current directory)')
|
||||
parser.add_option("--cachedir", dest="cachedir", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='package cache directory (defaults to /var/cache/pungi)')
|
||||
parser.add_option("--bugurl", dest="bugurl", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the url for your bug system (defaults to http://bugzilla.redhat.com)')
|
||||
parser.add_option("--selfhosting", action="store_true", dest="selfhosting",
|
||||
help='build a self-hosting tree by following build dependencies (optional)')
|
||||
parser.add_option("--fulltree", action="store_true", dest="fulltree",
|
||||
help='build a tree that includes all packages built from corresponding source rpms (optional)')
|
||||
parser.add_option("--nosource", action="store_true", dest="nosource",
|
||||
help='disable gathering of source packages (optional)')
|
||||
parser.add_option("--nodebuginfo", action="store_true", dest="nodebuginfo",
|
||||
help='disable gathering of debuginfo packages (optional)')
|
||||
parser.add_option("--nodownload", action="store_true", dest="nodownload",
|
||||
help='disable downloading of packages. instead, print the package URLs (optional)')
|
||||
parser.add_option("--norelnotes", action="store_true", dest="norelnotes",
|
||||
help='disable gathering of release notes (optional); DEPRECATED')
|
||||
parser.add_option("--nogreedy", action="store_true", dest="nogreedy",
|
||||
help='disable pulling of all providers of package dependencies (optional)')
|
||||
parser.add_option("--nodeps", action="store_false", dest="resolve_deps", default=True,
|
||||
help='disable resolving dependencies')
|
||||
parser.add_option("--sourceisos", default=False, action="store_true", dest="sourceisos",
|
||||
help='Create the source isos (other arch runs must be done)')
|
||||
parser.add_option("--force", default=False, action="store_true",
|
||||
help='Force reuse of an existing destination directory (will overwrite files)')
|
||||
parser.add_option("--isfinal", default=False, action="store_true",
|
||||
help='Specify this is a GA tree, which causes betanag to be turned off during install')
|
||||
parser.add_option("--nohash", default=False, action="store_true",
|
||||
help='disable hashing the Packages trees')
|
||||
parser.add_option("--full-archlist", action="store_true",
|
||||
help='Use the full arch list for x86_64 (include i686, i386, etc.)')
|
||||
parser.add_option("--arch",
|
||||
help='Override default (uname based) arch')
|
||||
parser.add_option("--greedy", metavar="METHOD",
|
||||
help='Greedy method; none, all, build')
|
||||
parser.add_option("--multilib", action="append", metavar="METHOD",
|
||||
help='Multilib method; can be specified multiple times; recommended: devel, runtime')
|
||||
parser.add_option("--lookaside-repo", action="append", dest="lookaside_repos", metavar="NAME",
|
||||
help='Specify lookaside repo name(s) (packages will used for depsolving but not be included in the output)')
|
||||
parser.add_option("--workdirbase", dest="workdirbase", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='base working directory (defaults to destdir + /work)')
|
||||
parser.add_option("--no-dvd", default=False, action="store_true", dest="no_dvd",
|
||||
help='Do not make a install DVD/CD only the netinstall image and the tree')
|
||||
parser.add_option("--lorax-conf", type="string",
|
||||
help='Path to lorax.conf file (optional)')
|
||||
parser.add_option("-i", "--installpkgs", default=[],
|
||||
action="append", metavar="STRING",
|
||||
help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)")
|
||||
parser.add_option("--multilibconf", default=None, type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help="Path to multilib conf files. Default is /usr/share/pungi/multilib/")
|
||||
|
||||
parser.add_option("-c", "--config", dest="config",
|
||||
help='Path to kickstart config file')
|
||||
parser.add_option("--all-stages", action="store_true", default=True, dest="do_all",
|
||||
help="Enable ALL stages")
|
||||
parser.add_option("-G", action="store_true", default=False, dest="do_gather",
|
||||
help="Flag to enable processing the Gather stage")
|
||||
parser.add_option("-C", action="store_true", default=False, dest="do_createrepo",
|
||||
help="Flag to enable processing the Createrepo stage")
|
||||
parser.add_option("-B", action="store_true", default=False, dest="do_buildinstall",
|
||||
help="Flag to enable processing the BuildInstall stage")
|
||||
parser.add_option("-I", action="store_true", default=False, dest="do_createiso",
|
||||
help="Flag to enable processing the CreateISO stage")
|
||||
parser.add_option("--relnotepkgs", dest="relnotepkgs", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='Rpms which contain the release notes')
|
||||
parser.add_option("--relnotefilere", dest="relnotefilere", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='Which files are the release notes -- GPL EULA')
|
||||
parser.add_option("--nomacboot", action="store_true", dest="nomacboot", help='disable setting up macboot as no hfs support ')
|
||||
|
||||
|
||||
(opts, args) = parser.parse_args()
|
||||
|
||||
if not opts.config:
|
||||
parser.error("Please specify a config file")
|
||||
|
||||
if not config.get('pungi', 'variant').isalnum() and not config.get('pungi', 'variant') == '':
|
||||
parser.error("Variant must be alphanumeric")
|
||||
|
||||
if opts.do_gather or opts.do_createrepo or opts.do_buildinstall or opts.do_createiso:
|
||||
opts.do_all = False
|
||||
|
||||
if opts.arch and (opts.do_all or opts.do_buildinstall):
|
||||
parser.error("Cannot override arch while the BuildInstall stage is enabled")
|
||||
|
||||
# set the iso_basename.
|
||||
if not config.get('pungi', 'variant') == '':
|
||||
config.set('pungi', 'iso_basename', '%s-%s' % (config.get('pungi', 'family'), config.get('pungi', 'variant')))
|
||||
else:
|
||||
config.set('pungi', 'iso_basename', config.get('pungi', 'family'))
|
||||
|
||||
return (opts, args)
|
||||
|
||||
main()
|
||||
|
bin/pungi-gather | 325
@@ -1,325 +0,0 @@
|
||||
#!/usr/bin/python -tt
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; version 2 of the License.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Library General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
|
||||
import os
|
||||
import pungi.gather
|
||||
import pungi.config
|
||||
import pungi.ks
|
||||
import subprocess
|
||||
|
||||
def main():
|
||||
|
||||
config = pungi.config.Config()
|
||||
|
||||
(opts, args) = get_arguments(config)
|
||||
|
||||
# You must be this high to ride if you're going to do root tasks
|
||||
if os.geteuid () != 0 and (opts.do_all or opts.do_buildinstall):
|
||||
print >> sys.stderr, "You must run pungi as root"
|
||||
return 1
|
||||
|
||||
if opts.do_all or opts.do_buildinstall:
|
||||
try:
|
||||
selinux = subprocess.Popen('/usr/sbin/getenforce',
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=open('/dev/null', 'w')).communicate()[0].strip('\n')
|
||||
if selinux == 'Enforcing':
|
||||
print >> sys.stdout, "WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."
|
||||
print >> sys.stdout, "Consider running with setenforce 0."
|
||||
except:
|
||||
pass
|
||||
|
||||
# Set up the kickstart parser and pass in the kickstart file we were handed
|
||||
ksparser = pungi.ks.get_ksparser(ks_path=opts.config)
|
||||
|
||||
if opts.sourceisos:
|
||||
config.set('pungi', 'arch', 'source')
|
||||
|
||||
for part in ksparser.handler.partition.partitions:
|
||||
if part.mountpoint == 'iso':
|
||||
config.set('pungi', 'cdsize', str(part.size))
|
||||
|
||||
config.set('pungi', 'force', str(opts.force))
|
||||
|
||||
if config.get('pungi', 'workdirbase') == '/work':
|
||||
config.set('pungi', 'workdirbase', "%s/work" % config.get('pungi', 'destdir'))
|
||||
# Set up our directories
|
||||
if not os.path.exists(config.get('pungi', 'destdir')):
|
||||
try:
|
||||
os.makedirs(config.get('pungi', 'destdir'))
|
||||
except OSError, e:
|
||||
print >> sys.stderr, "Error: Cannot create destination dir %s" % config.get('pungi', 'destdir')
|
||||
sys.exit(1)
|
||||
else:
|
||||
print >> sys.stdout, "Warning: Reusing existing destination directory."
|
||||
|
||||
if not os.path.exists(config.get('pungi', 'workdirbase')):
|
||||
try:
|
||||
os.makedirs(config.get('pungi', 'workdirbase'))
|
||||
except OSError, e:
|
||||
print >> sys.stderr, "Error: Cannot create working base dir %s" % config.get('pungi', 'workdirbase')
|
||||
sys.exit(1)
|
||||
else:
|
||||
print >> sys.stdout, "Warning: Reusing existing working base directory."
|
||||
|
||||
cachedir = config.get('pungi', 'cachedir')
|
||||
|
||||
if not os.path.exists(cachedir):
|
||||
try:
|
||||
os.makedirs(cachedir)
|
||||
except OSError, e:
|
||||
print >> sys.stderr, "Error: Cannot create cache dir %s" % cachedir
|
||||
sys.exit(1)
|
||||
|
||||
# Set debuginfo flag
|
||||
if opts.nodebuginfo:
|
||||
config.set('pungi', 'debuginfo', "False")
|
||||
if opts.greedy:
|
||||
config.set('pungi', 'greedy', opts.greedy)
|
||||
else:
|
||||
# XXX: compatibility
|
||||
if opts.nogreedy:
|
||||
config.set('pungi', 'greedy', "none")
|
||||
else:
|
||||
config.set('pungi', 'greedy', "all")
|
||||
config.set('pungi', 'resolve_deps', str(bool(opts.resolve_deps)))
|
||||
if opts.isfinal:
|
||||
config.set('pungi', 'isfinal', "True")
|
||||
if opts.nohash:
|
||||
config.set('pungi', 'nohash', "True")
|
||||
if opts.full_archlist:
|
||||
config.set('pungi', 'full_archlist', "True")
|
||||
if opts.arch:
|
||||
config.set('pungi', 'arch', opts.arch)
|
||||
if opts.multilib:
|
||||
config.set('pungi', 'multilib', " ".join(opts.multilib))
|
||||
if opts.lookaside_repos:
|
||||
config.set('pungi', 'lookaside_repos', " ".join(opts.lookaside_repos))
|
||||
if opts.no_dvd:
|
||||
config.set('pungi', 'no_dvd', "True")
|
||||
if opts.nomacboot:
|
||||
config.set('pungi', 'nomacboot', "True")
|
||||
config.set("pungi", "fulltree", str(bool(opts.fulltree)))
|
||||
config.set("pungi", "selfhosting", str(bool(opts.selfhosting)))
|
||||
config.set("pungi", "nosource", str(bool(opts.nosource)))
|
||||
config.set("pungi", "nodebuginfo", str(bool(opts.nodebuginfo)))
|
||||
|
||||
if opts.lorax_conf:
|
||||
config.set("lorax", "conf_file", opts.lorax_conf)
|
||||
if opts.installpkgs:
|
||||
config.set("lorax", "installpkgs", " ".join(opts.installpkgs))
|
||||
|
||||
# Actually do work.
|
||||
mypungi = pungi.gather.Pungi(config, ksparser)
|
||||
|
||||
with mypungi.yumlock:
|
||||
if not opts.sourceisos:
|
||||
if opts.do_all or opts.do_gather or opts.do_buildinstall:
|
||||
mypungi._inityum() # initialize the yum object for things that need it
|
||||
if opts.do_all or opts.do_gather:
|
||||
mypungi.gather()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_packages():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("RPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadPackages()
|
||||
mypungi.makeCompsFile()
|
||||
if not opts.nodebuginfo:
|
||||
mypungi.getDebuginfoList()
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_debuginfo():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadDebuginfo()
|
||||
if not opts.nosource:
|
||||
if opts.nodownload:
|
||||
for line in mypungi.list_srpms():
|
||||
flags_str = ",".join(line["flags"])
|
||||
if flags_str:
|
||||
flags_str = "(%s)" % flags_str
|
||||
sys.stdout.write("SRPM%s: %s\n" % (flags_str, line["path"]))
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
mypungi.downloadSRPMs()
|
||||
|
||||
print "RPM size: %s MiB" % (mypungi.size_packages() / 1024 ** 2)
|
||||
if not opts.nodebuginfo:
|
||||
print "DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024 ** 2)
|
||||
if not opts.nosource:
|
||||
print "SRPM size: %s MiB" % (mypungi.size_srpms() / 1024 ** 2)
|
||||
|
||||
# Furthermore (but without the yumlock...)
|
||||
if not opts.sourceisos:
|
||||
if opts.do_all or opts.do_createrepo:
|
||||
mypungi.doCreaterepo()
|
||||
|
||||
if opts.do_all or opts.do_buildinstall:
|
||||
if not opts.norelnotes:
|
||||
mypungi.doGetRelnotes()
|
||||
mypungi.doBuildinstall()
|
||||
|
||||
if opts.do_all or opts.do_createiso:
|
||||
mypungi.doCreateIsos()
|
||||
|
||||
# Do things slightly different for src.
|
||||
if opts.sourceisos:
|
||||
# we already have all the content gathered
|
||||
mypungi.topdir = os.path.join(config.get('pungi', 'destdir'),
|
||||
config.get('pungi', 'version'),
|
||||
config.get('pungi', 'variant'),
|
||||
'source', 'SRPMS')
|
||||
mypungi.doCreaterepo(comps=False)
|
||||
if opts.do_all or opts.do_createiso:
|
||||
mypungi.doCreateIsos()
|
||||
|
||||
print "All done!"
|
||||
|
||||
if __name__ == '__main__':
|
||||
from optparse import OptionParser
|
||||
import sys
|
||||
import time
|
||||
|
||||
today = time.strftime('%Y%m%d', time.localtime())
|
||||
|
||||
def get_arguments(config):
|
||||
parser = OptionParser("%prog [--help] [options]", version="%prog 3.13")
|
||||
|
||||
def set_config(option, opt_str, value, parser, config):
|
||||
config.set('pungi', option.dest, value)
|
||||
|
||||
# Pulled in from config file to be cli options as part of pykickstart conversion
|
||||
parser.add_option("--name", dest="family", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the name for your distribution (defaults to "Fedora"), DEPRECATED')
|
||||
parser.add_option("--family", dest="family", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the family name for your distribution (defaults to "Fedora")')
|
||||
parser.add_option("--ver", dest="version", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the version of your distribution (defaults to datestamp)')
|
||||
parser.add_option("--flavor", dest="variant", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the flavor of your distribution spin (optional), DEPRECATED')
|
||||
parser.add_option("--variant", dest="variant", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the variant of your distribution spin (optional)')
|
||||
parser.add_option("--destdir", dest="destdir", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='destination directory (defaults to current directory)')
|
||||
parser.add_option("--cachedir", dest="cachedir", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='package cache directory (defaults to /var/cache/pungi)')
|
||||
parser.add_option("--bugurl", dest="bugurl", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='the url for your bug system (defaults to http://bugzilla.redhat.com)')
|
||||
parser.add_option("--selfhosting", action="store_true", dest="selfhosting",
|
||||
help='build a self-hosting tree by following build dependencies (optional)')
|
||||
parser.add_option("--fulltree", action="store_true", dest="fulltree",
|
||||
help='build a tree that includes all packages built from corresponding source rpms (optional)')
|
||||
parser.add_option("--nosource", action="store_true", dest="nosource",
|
||||
help='disable gathering of source packages (optional)')
|
||||
parser.add_option("--nodebuginfo", action="store_true", dest="nodebuginfo",
|
||||
help='disable gathering of debuginfo packages (optional)')
|
||||
parser.add_option("--nodownload", action="store_true", dest="nodownload",
|
||||
help='disable downloading of packages. instead, print the package URLs (optional)')
|
||||
parser.add_option("--norelnotes", action="store_true", dest="norelnotes",
|
||||
help='disable gathering of release notes (optional); DEPRECATED')
|
||||
parser.add_option("--nogreedy", action="store_true", dest="nogreedy",
|
||||
help='disable pulling of all providers of package dependencies (optional)')
|
||||
parser.add_option("--nodeps", action="store_false", dest="resolve_deps", default=True,
|
||||
help='disable resolving dependencies')
|
||||
parser.add_option("--sourceisos", default=False, action="store_true", dest="sourceisos",
|
||||
help='Create the source isos (other arch runs must be done)')
|
||||
parser.add_option("--force", default=False, action="store_true",
|
||||
help='Force reuse of an existing destination directory (will overwrite files)')
|
||||
parser.add_option("--isfinal", default=False, action="store_true",
|
||||
help='Specify this is a GA tree, which causes betanag to be turned off during install')
|
||||
parser.add_option("--nohash", default=False, action="store_true",
|
||||
help='disable hashing the Packages trees')
|
||||
parser.add_option("--full-archlist", action="store_true",
|
||||
help='Use the full arch list for x86_64 (include i686, i386, etc.)')
|
||||
parser.add_option("--arch",
|
||||
help='Override default (uname based) arch')
|
||||
parser.add_option("--greedy", metavar="METHOD",
|
||||
help='Greedy method; none, all, build')
|
||||
parser.add_option("--multilib", action="append", metavar="METHOD",
|
||||
help='Multilib method; can be specified multiple times; recommended: devel, runtime')
|
||||
parser.add_option("--lookaside-repo", action="append", dest="lookaside_repos", metavar="NAME",
|
||||
help='Specify lookaside repo name(s) (packages will used for depsolving but not be included in the output)')
|
||||
parser.add_option("--workdirbase", dest="workdirbase", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='base working directory (defaults to destdir + /work)')
|
||||
parser.add_option("--no-dvd", default=False, action="store_true", dest="no_dvd",
|
||||
help='Do not make a install DVD/CD only the netinstall image and the tree')
|
||||
parser.add_option("--lorax-conf", type="string",
|
||||
help='Path to lorax.conf file (optional)')
|
||||
parser.add_option("-i", "--installpkgs", default=[],
|
||||
action="append", metavar="STRING",
|
||||
help="Package glob for lorax to install before runtime-install.tmpl runs. (may be listed multiple times)")
|
||||
parser.add_option("--multilibconf", default=None, type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help="Path to multilib conf files. Default is /usr/share/pungi/multilib/")
|
||||
|
||||
parser.add_option("-c", "--config", dest="config",
|
||||
help='Path to kickstart config file')
|
||||
parser.add_option("--all-stages", action="store_true", default=True, dest="do_all",
|
||||
help="Enable ALL stages")
|
||||
parser.add_option("-G", action="store_true", default=False, dest="do_gather",
|
||||
help="Flag to enable processing the Gather stage")
|
||||
parser.add_option("-C", action="store_true", default=False, dest="do_createrepo",
|
||||
help="Flag to enable processing the Createrepo stage")
|
||||
parser.add_option("-B", action="store_true", default=False, dest="do_buildinstall",
|
||||
help="Flag to enable processing the BuildInstall stage")
|
||||
parser.add_option("-I", action="store_true", default=False, dest="do_createiso",
|
||||
help="Flag to enable processing the CreateISO stage")
|
||||
parser.add_option("--relnotepkgs", dest="relnotepkgs", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='Rpms which contain the release notes')
|
||||
parser.add_option("--relnotefilere", dest="relnotefilere", type="string",
|
||||
action="callback", callback=set_config, callback_args=(config, ),
|
||||
help='Which files are the release notes -- GPL EULA')
|
||||
parser.add_option("--nomacboot", action="store_true", dest="nomacboot", help='disable setting up macboot as no hfs support ')
|
||||
|
||||
|
||||
(opts, args) = parser.parse_args()
|
||||
|
||||
if not opts.config:
|
||||
parser.error("Please specify a config file")
|
||||
|
||||
if not config.get('pungi', 'variant').isalnum() and not config.get('pungi', 'variant') == '':
|
||||
parser.error("Variant must be alphanumeric")
|
||||
|
||||
if opts.do_gather or opts.do_createrepo or opts.do_buildinstall or opts.do_createiso:
|
||||
opts.do_all = False
|
||||
|
||||
if opts.arch and (opts.do_all or opts.do_buildinstall):
|
||||
parser.error("Cannot override arch while the BuildInstall stage is enabled")
|
||||
|
||||
# set the iso_basename.
|
||||
if not config.get('pungi', 'variant') == '':
|
||||
config.set('pungi', 'iso_basename', '%s-%s' % (config.get('pungi', 'family'), config.get('pungi', 'variant')))
|
||||
else:
|
||||
config.set('pungi', 'iso_basename', config.get('pungi', 'family'))
|
||||
|
||||
return (opts, args)
|
||||
|
||||
main()
|
bin/pungi-koji (new executable file) | 340
@@ -0,0 +1,340 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import optparse
|
||||
import logging
|
||||
import locale
|
||||
import datetime
|
||||
import getpass
|
||||
import socket
|
||||
import json
|
||||
import pipes
|
||||
|
||||
here = sys.path[0]
|
||||
if here != '/usr/bin':
|
||||
# Git checkout
|
||||
sys.path[0] = os.path.dirname(here)
|
||||
|
||||
from pungi import __version__
|
||||
|
||||
|
||||
# force C locales
|
||||
locale.setlocale(locale.LC_ALL, "C")
|
||||
|
||||
|
||||
COMPOSE = None
|
||||
|
||||
|
||||
def main():
|
||||
global COMPOSE
|
||||
|
||||
parser = optparse.OptionParser()
|
||||
parser.add_option(
|
||||
"--target-dir",
|
||||
metavar="PATH",
|
||||
help="a compose is created under this directory",
|
||||
)
|
||||
parser.add_option(
|
||||
"--label",
|
||||
help="specify compose label (example: Snapshot-1.0); required for production composes"
|
||||
)
|
||||
parser.add_option(
|
||||
"--no-label",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="make a production compose without label"
|
||||
)
|
||||
parser.add_option(
|
||||
"--supported",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="set supported flag on media (automatically on for 'RC-x.y' labels)"
|
||||
)
|
||||
parser.add_option(
|
||||
"--old-composes",
|
||||
metavar="PATH",
|
||||
dest="old_composes",
|
||||
default=[],
|
||||
action="append",
|
||||
help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
|
||||
)
|
||||
parser.add_option(
|
||||
"--compose-dir",
|
||||
metavar="PATH",
|
||||
help="reuse an existing compose directory (DANGEROUS!)",
|
||||
)
|
||||
parser.add_option(
|
||||
"--debug-mode",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="run pungi in DEBUG mode (DANGEROUS!)",
|
||||
)
|
||||
parser.add_option(
|
||||
"--config",
|
||||
help="Config file"
|
||||
)
|
||||
parser.add_option(
|
||||
"--skip-phase",
|
||||
metavar="PHASE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="skip a compose phase",
|
||||
)
|
||||
parser.add_option(
|
||||
"--just-phase",
|
||||
metavar="PHASE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="run only a specified compose phase",
|
||||
)
|
||||
parser.add_option(
|
||||
"--nightly",
|
||||
action="store_const",
|
||||
const="nightly",
|
||||
dest="compose_type",
|
||||
help="make a nightly compose",
|
||||
)
|
||||
parser.add_option(
|
||||
"--test",
|
||||
action="store_const",
|
||||
const="test",
|
||||
dest="compose_type",
|
||||
help="make a test compose",
|
||||
)
|
||||
parser.add_option(
|
||||
"--koji-event",
|
||||
metavar="ID",
|
||||
type="int",
|
||||
help="specify a koji event for populating package set",
|
||||
)
|
||||
parser.add_option(
|
||||
"--version",
|
||||
action="store_true",
|
||||
help="output version information and exit",
|
||||
)
|
||||
|
||||
opts, args = parser.parse_args()
|
||||
|
||||
if opts.version:
|
||||
print("pungi %s" % __version__)
|
||||
sys.exit(0)
|
||||
|
||||
if opts.target_dir and opts.compose_dir:
|
||||
parser.error("cannot specify --target-dir and --compose-dir at once")
|
||||
|
||||
if not opts.target_dir and not opts.compose_dir:
|
||||
parser.error("please specify a target directory")
|
||||
|
||||
if opts.target_dir and not opts.compose_dir:
|
||||
opts.target_dir = os.path.abspath(opts.target_dir)
|
||||
if not os.path.isdir(opts.target_dir):
|
||||
parser.error("The target directory does not exist or is not a directory: %s" % opts.target_dir)
|
||||
else:
|
||||
opts.compose_dir = os.path.abspath(opts.compose_dir)
|
||||
if not os.path.isdir(opts.compose_dir):
|
||||
parser.error("The compose directory does not exist or is not a directory: %s" % opts.compose_dir)
|
||||
|
||||
compose_type = opts.compose_type or "production"
|
||||
if compose_type == "production" and not opts.label and not opts.no_label:
|
||||
parser.error("must specify label for a production compose")
|
||||
|
||||
if not opts.config:
|
||||
parser.error("please specify a config")
|
||||
opts.config = os.path.abspath(opts.config)
|
||||
|
||||
# check if all requirements are met
|
||||
import pungi.checks
|
||||
if not pungi.checks.check():
|
||||
sys.exit(1)
|
||||
|
||||
import kobo.conf
|
||||
import kobo.log
|
||||
import productmd.composeinfo.compose
|
||||
|
||||
if opts.label:
|
||||
try:
|
||||
productmd.composeinfo.compose.verify_label(opts.label)
|
||||
except ValueError as ex:
|
||||
parser.error(str(ex))
|
||||
|
||||
from pungi.compose import Compose
|
||||
|
||||
logger = logging.Logger("Pungi")
|
||||
kobo.log.add_stderr_logger(logger)
|
||||
|
||||
conf = kobo.conf.PyConfigParser()
|
||||
conf.load_from_file(opts.config)
|
||||
|
||||
if opts.target_dir:
|
||||
compose_dir = Compose.get_compose_dir(opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label)
|
||||
else:
|
||||
compose_dir = opts.compose_dir
|
||||
|
||||
compose = Compose(conf, topdir=compose_dir, debug=opts.debug_mode, skip_phases=opts.skip_phase, just_phases=opts.just_phase,
|
||||
old_composes=opts.old_composes, koji_event=opts.koji_event, supported=opts.supported, logger=logger)
|
||||
kobo.log.add_file_logger(logger, compose.paths.log.log_file("global", "pungi.log"))
|
||||
COMPOSE = compose
|
||||
run_compose(compose)
|
||||
|
||||
|
||||
def run_compose(compose):
|
||||
import pungi.phases
|
||||
import pungi.metadata
|
||||
|
||||
compose.write_status("STARTED")
|
||||
compose.log_info("Host: %s" % socket.gethostname())
|
||||
compose.log_info("User name: %s" % getpass.getuser())
|
||||
compose.log_info("Working directory: %s" % os.getcwd())
|
||||
compose.log_info("Command line: %s" % " ".join([pipes.quote(arg) for arg in sys.argv]))
|
||||
compose.log_info("Compose top directory: %s" % compose.topdir)
|
||||
compose.read_variants()
|
||||
|
||||
# dump the config file
|
||||
date_str = datetime.datetime.strftime(datetime.datetime.now(), "%F_%X").replace(":", "-")
|
||||
config_dump = compose.paths.log.log_file("global", "config-dump_%s" % date_str)
|
||||
open(config_dump, "w").write(json.dumps(compose.conf, sort_keys=True, indent=4))
|
||||
|
||||
# initialize all phases
|
||||
init_phase = pungi.phases.InitPhase(compose)
|
||||
pkgset_phase = pungi.phases.PkgsetPhase(compose)
|
||||
createrepo_phase = pungi.phases.CreaterepoPhase(compose)
|
||||
buildinstall_phase = pungi.phases.BuildinstallPhase(compose)
|
||||
productimg_phase = pungi.phases.ProductimgPhase(compose, pkgset_phase)
|
||||
gather_phase = pungi.phases.GatherPhase(compose, pkgset_phase)
|
||||
extrafiles_phase = pungi.phases.ExtraFilesPhase(compose, pkgset_phase)
|
||||
createiso_phase = pungi.phases.CreateisoPhase(compose)
|
||||
liveimages_phase = pungi.phases.LiveImagesPhase(compose)
|
||||
test_phase = pungi.phases.TestPhase(compose)
|
||||
|
||||
# check if all config options are set
|
||||
errors = []
|
||||
for phase in (init_phase, pkgset_phase, buildinstall_phase, productimg_phase, gather_phase, createiso_phase, test_phase):
|
||||
if phase.skip():
|
||||
continue
|
||||
try:
|
||||
phase.validate()
|
||||
except ValueError as ex:
|
||||
for i in str(ex).splitlines():
|
||||
errors.append("%s: %s" % (phase.name.upper(), i))
|
||||
if errors:
|
||||
for i in errors:
|
||||
compose.log_error(i)
|
||||
print(i)
|
||||
sys.exit(1)
|
||||
|
||||
# INIT phase
|
||||
init_phase.start()
|
||||
init_phase.stop()
|
||||
|
||||
# PKGSET phase
|
||||
pkgset_phase.start()
|
||||
pkgset_phase.stop()
|
||||
|
||||
# BUILDINSTALL phase - start
|
||||
buildinstall_phase.start()
|
||||
|
||||
# GATHER phase
|
||||
gather_phase.start()
|
||||
gather_phase.stop()
|
||||
|
||||
# EXTRA_FILES phase
|
||||
extrafiles_phase.start()
|
||||
extrafiles_phase.stop()
|
||||
|
||||
# CREATEREPO phase
|
||||
createrepo_phase.start()
|
||||
createrepo_phase.stop()
|
||||
|
||||
# BUILDINSTALL phase
|
||||
# must finish before PRODUCTIMG
|
||||
# must finish before CREATEISO
|
||||
buildinstall_phase.stop()
|
||||
if not buildinstall_phase.skip():
|
||||
buildinstall_phase.copy_files()
|
||||
|
||||
# PRODUCTIMG phase
|
||||
productimg_phase.start()
|
||||
productimg_phase.stop()
|
||||
|
||||
# write treeinfo before ISOs are created
|
||||
for variant in compose.get_variants():
|
||||
for arch in variant.arches + ["src"]:
|
||||
pungi.metadata.write_tree_info(compose, arch, variant)
|
||||
|
||||
# write .discinfo and media.repo before ISOs are created
|
||||
for variant in compose.get_variants(recursive=True):
|
||||
if variant.type == "addon":
|
||||
continue
|
||||
for arch in variant.arches + ["src"]:
|
||||
timestamp = pungi.metadata.write_discinfo(compose, arch, variant)
|
||||
pungi.metadata.write_media_repo(compose, arch, variant, timestamp)
|
||||
|
||||
# CREATEISO and LIVEIMAGES phases
|
||||
createiso_phase.start()
|
||||
liveimages_phase.start()
|
||||
|
||||
createiso_phase.stop()
|
||||
liveimages_phase.stop()
|
||||
|
||||
# merge checksum files
|
||||
for variant in compose.get_variants(types=["variant", "layered-product"]):
|
||||
for arch in variant.arches + ["src"]:
|
||||
iso_dir = compose.paths.compose.iso_dir(arch, variant, create_dir=False)
|
||||
if not iso_dir or not os.path.exists(iso_dir):
|
||||
continue
|
||||
for checksum_type in ("md5", "sha1", "sha256"):
|
||||
checksum_upper = "%sSUM" % checksum_type.upper()
|
||||
checksums = sorted([i for i in os.listdir(iso_dir) if i.endswith(".%s" % checksum_upper)])
|
||||
fo = open(os.path.join(iso_dir, checksum_upper), "w")
|
||||
for i in checksums:
|
||||
data = open(os.path.join(iso_dir, i), "r").read()
|
||||
fo.write(data)
|
||||
|
||||
pungi.metadata.write_compose_info(compose)
|
||||
compose.im.dump(compose.paths.compose.metadata("images.json"))
|
||||
|
||||
# TEST phase
|
||||
test_phase.start()
|
||||
test_phase.stop()
|
||||
|
||||
# create a latest symlink
|
||||
compose_dir = os.path.basename(compose.topdir)
|
||||
symlink_name = "latest-%s-%s" % (compose.conf["product_short"], ".".join(compose.conf["product_version"].split(".")[:-1]))
|
||||
if compose.conf["product_is_layered"]:
|
||||
symlink_name += "-%s-%s" % (compose.conf["base_product_short"], compose.conf["base_product_version"])
|
||||
symlink = os.path.join(compose.topdir, "..", symlink_name)
|
||||
|
||||
try:
|
||||
os.unlink(symlink)
|
||||
except OSError as ex:
|
||||
if ex.errno != 2:
|
||||
raise
|
||||
try:
|
||||
os.symlink(compose_dir, symlink)
|
||||
except Exception as ex:
|
||||
print("ERROR: couldn't create latest symlink: %s" % ex)
|
||||
|
||||
compose.log_info("Compose finished: %s" % compose.topdir)
|
||||
compose.write_status("FINISHED")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except (Exception, KeyboardInterrupt) as ex:
|
||||
if COMPOSE:
|
||||
tb_path = COMPOSE.paths.log.log_file("global", "traceback")
|
||||
COMPOSE.log_error("Exception: %s" % ex)
|
||||
COMPOSE.log_error("Extended traceback in: %s" % tb_path)
|
||||
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
|
||||
COMPOSE.write_status("DOOMED")
|
||||
import kobo.tback
|
||||
open(tb_path, "w").write(kobo.tback.Traceback().get_traceback())
|
||||
else:
|
||||
print("Exception: %s" % ex)
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
raise
|
@@ -112,7 +112,7 @@ class PungiWrapper(object):
         kickstart.close()
 
     def get_pungi_cmd(self, config, destdir, name, version=None, flavor=None, selfhosting=False, fulltree=False, greedy=None, nodeps=False, nodownload=True, full_archlist=False, arch=None, cache_dir=None, lookaside_repos=None, multilib_methods=None):
-        cmd = ["pungi-gather"]
+        cmd = ["pungi"]
 
         # Gather stage
         cmd.append("-G")
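For reference, a rough sketch of how get_pungi_cmd plausibly assembles the renamed command. Only the binary name, the method signature, and the "-G" flag come from this hunk; the mapping of the remaining parameters to command-line flags is an assumption based on the option parser in bin/pungi.

# Sketch only -- not the actual PungiWrapper implementation from this repo.
def get_pungi_cmd_sketch(config, destdir, name, version=None, nodownload=True,
                         selfhosting=False, fulltree=False, arch=None):
    cmd = ["pungi"]  # was ["pungi-gather"] before this commit

    # Gather stage
    cmd.append("-G")

    # Assumed flag mapping, based on the option parser in bin/pungi.
    cmd.append("--config=%s" % config)
    cmd.append("--destdir=%s" % destdir)
    cmd.append("--name=%s" % name)
    if version:
        cmd.append("--ver=%s" % version)
    if nodownload:
        cmd.append("--nodownload")
    if selfhosting:
        cmd.append("--selfhosting")
    if fulltree:
        cmd.append("--fulltree")
    if arch:
        cmd.append("--arch=%s" % arch)
    return cmd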