# -*- coding: utf-8 -*-

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <https://gnu.org/licenses/>.


import logging
import os
import re
import shutil
import subprocess
import sys
from fnmatch import fnmatch

import lockfile
import urlgrabber.progress
import yum

from productmd.common import SortedConfigParser
import ConfigParser

import arch as arch_module
import multilib_yum as multilib
import pungi.util
from pungi.wrappers.createrepo import CreaterepoWrapper


class ReentrantYumLock(object):
    """ A lock that can be acquired multiple times by the same process. """

    def __init__(self, lock, log):
        self.lock = lock
        self.log = log
        self.count = 0

    def __enter__(self):
        if not self.count:
            self.log.info("Waiting on %r" % self.lock.lock_file)
            self.lock.acquire()
            self.log.info("Got %r" % self.lock.lock_file)
        self.count = self.count + 1
        self.log.info("Lock count upped to %i" % self.count)

    def __exit__(self, type, value, tb):
        self.count = self.count - 1
        self.log.info("Lock count downed to %i" % self.count)
        self.log.info("%r %r %r" % (type, value, tb))
        if not self.count:
            self.lock.release()
            self.log.info("Released %r" % self.lock.lock_file)
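
# A minimal usage sketch (names illustrative): the same process can nest
# ``with`` blocks on one ReentrantYumLock without deadlocking, since only
# the outermost enter/exit touches the underlying lockfile.
#
#   lock = ReentrantYumLock(lockfile.LockFile("/tmp/yumlock"), logger)
#   with lock:
#       with lock:  # re-entry only bumps the counter
#           do_yum_work()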


def yumlocked(method):
    """ A locking decorator. """
    def wrapper(self, *args, **kwargs):
        with self.yumlock:
            return method(self, *args, **kwargs)
    # TODO - replace argspec, signature, etc..
    return wrapper
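
# Usage sketch (assumes the instance carries a ``yumlock`` attribute, as
# Pungi.__init__ below sets up):
#
#   class Example(object):
#       @yumlocked
#       def do_download(self):
#           ...  # body runs with self.yumlock held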


def is_source(po):
    if po.arch in ("src", "nosrc"):
        return True
    return False


def is_noarch(po):
    if po.arch == "noarch":
        return True
    return False


def is_package(po):
    if pungi.util.pkg_is_debug(po):
        return False
    if is_source(po):
        return False
    return True


FLAGS = {
    'EQ': '=',
    'GE': '>=',
    'LE': '<=',
    'GT': '>',
    'LT': '<',
}


class Req(object):
    """A wrapper for a tuple representing a Requires tag.

    Only useful for formatting the value into a human readable string.
    """
    def __init__(self, req):
        self.r, self.f, self.v = req

    def __str__(self):
        if self.f and self.v:
            flag = FLAGS.get(self.f, '??')
            version = '%s:%s-%s' % self.v
            return '%s %s %s' % (self.r, flag, version)
        return self.r
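
# Rough example of the formatting (tuple shape per yum's requires tuples;
# values illustrative):
#
#   >>> str(Req(("python", "GE", ("0", "2.6", "1"))))
#   'python >= 0:2.6-1'
#   >>> str(Req(("rpmlib(PayloadIsXz)", None, (None, None, None))))
#   'rpmlib(PayloadIsXz)'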


class PungiBase(object):
    """The base Pungi class. Set up config items and logging here"""

    def __init__(self, config):
        self.config = config
        multilib.init(self.config.get('pungi', 'multilibconf'))

        # ARCH setup
        self.tree_arch = self.config.get('pungi', 'arch')
        self.yum_arch = arch_module.tree_arch_to_yum_arch(self.tree_arch)
        full_archlist = self.config.getboolean('pungi', 'full_archlist')
        self.valid_arches = arch_module.get_valid_arches(self.tree_arch, multilib=full_archlist)
        self.valid_arches.append("src")  # throw source in there, filter it later
        self.valid_native_arches = arch_module.get_valid_arches(self.tree_arch, multilib=False)
        self.valid_multilib_arches = arch_module.get_valid_multilib_arches(self.tree_arch)

        # arch: compatible arches
        self.compatible_arches = {}
        for i in self.valid_arches:
            self.compatible_arches[i] = arch_module.get_compatible_arches(i)
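
        # Illustrative example (the real lists come from arch_module): for
        # tree_arch "x86_64" with full_archlist enabled, valid_arches would
        # hold the 64bit and 32bit arches plus "noarch" and "src", and
        # compatible_arches["x86_64"] the arches an x86_64 host can run.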

        self.doLoggerSetup()
        self.workdir = os.path.join(self.config.get('pungi', 'workdirbase'),
                                    self.config.get('pungi', 'variant'),
                                    self.tree_arch)

    def doLoggerSetup(self):
        """Setup our logger"""

        logdir = os.path.join(self.config.get('pungi', 'destdir'), 'logs')

        pungi.util._ensuredir(logdir, None, force=True)  # Always allow logs to be written out

        if self.config.get('pungi', 'variant'):
            logfile = os.path.join(logdir, '%s.%s.log' % (self.config.get('pungi', 'variant'),
                                                          self.tree_arch))
        else:
            logfile = os.path.join(logdir, '%s.log' % (self.tree_arch))

        # Create the root logger, that will log to our file
        logging.basicConfig(level=logging.DEBUG,
                            format='%(name)s.%(levelname)s: %(message)s',
                            filename=logfile)


class CallBack(urlgrabber.progress.TextMeter):
    """A call back function used with yum."""

    def __init__(self, logger):
        self.logger = logger

    def start(self, filename=None, url=None, basename=None, size=None, now=None, text=None):
        self.logger.info('Downloading %s (%sB)'
                         % (text, urlgrabber.progress.format_number(size)))

    def update(self, amount_read, name=None):
        return

    def end(self, amount_read, now=None):
        return
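
    # Note: update() and end() above are deliberate no-ops, so each download
    # produces exactly one log line (from start()) instead of a live meter.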


class PungiYum(yum.YumBase):
    """Subclass of Yum"""

    def __init__(self, config):
        self.pungiconfig = config
        yum.YumBase.__init__(self)

    def doLoggingSetup(self, debuglevel, errorlevel, syslog_ident=None, syslog_facility=None):
        """Setup the logging facility."""

        logdir = os.path.join(self.pungiconfig.get('pungi', 'destdir'), 'logs')
        if not os.path.exists(logdir):
            os.makedirs(logdir)
        if self.pungiconfig.get('pungi', 'variant'):
            logfile = os.path.join(logdir, '%s.%s.log' % (self.pungiconfig.get('pungi', 'variant'),
                                                          self.pungiconfig.get('pungi', 'arch')))
        else:
            logfile = os.path.join(logdir, '%s.log' % (self.pungiconfig.get('pungi', 'arch')))

        yum.logging.basicConfig(level=yum.logging.DEBUG, filename=logfile)

    def doFileLogSetup(self, uid, logfile):
        # This function overrides a yum function, allowing pungi to control
        # the logging.
        pass

    def _compare_providers(self, *args, **kwargs):
        # HACK: always prefer 64bit over 32bit packages
        result = yum.YumBase._compare_providers(self, *args, **kwargs)
        if len(result) >= 2:
            pkg1 = result[0][0]
            pkg2 = result[1][0]
            if pkg1.name == pkg2.name:
                best_arch = self.arch.get_best_arch_from_list([pkg1.arch, pkg2.arch], self.arch.canonarch)
                if best_arch != "noarch" and best_arch != pkg1.arch:
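                    # result[0:1:-1] is an empty slice, so this assignment
                    # deletes result[0], moving the better-arch provider to
                    # the front of the list.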
                    result[0:1] = result[0:1:-1]
        return result


class Pungi(PungiBase):
    def __init__(self, config, ksparser):
        PungiBase.__init__(self, config)

        # Set our own logging name space
        self.logger = logging.getLogger('Pungi')

        # Create a lock object for later use.
        filename = self.config.get('pungi', 'cachedir') + "/yumlock"
        lock = lockfile.LockFile(filename)
        self.yumlock = ReentrantYumLock(lock, self.logger)

        if not self.logger.handlers:
            # Create the stdout/err streams and only send INFO+ stuff there
            formatter = logging.Formatter('%(name)s:%(levelname)s: %(message)s')
            console = logging.StreamHandler()
            console.setFormatter(formatter)
            console.setLevel(logging.INFO)
            self.logger.addHandler(console)

        self.destdir = self.config.get('pungi', 'destdir')
        self.archdir = os.path.join(self.destdir,
                                    self.config.get('pungi', 'version'),
                                    self.config.get('pungi', 'variant'),
                                    self.tree_arch)

        self.topdir = os.path.join(self.archdir, 'os')
        self.isodir = os.path.join(self.archdir, self.config.get('pungi', 'isodir'))

        pungi.util._ensuredir(self.workdir, self.logger, force=True)

        self.common_files = []
        self.infofile = os.path.join(self.config.get('pungi', 'destdir'),
                                     self.config.get('pungi', 'version'),
                                     '.composeinfo')

        self.ksparser = ksparser

        self.resolved_deps = {}  # list the deps we've already resolved, short circuit
        self.excluded_packages = set()  # set of packages we've already excluded
        self.multilib_blacklist = set()  # set of packages we've already excluded through a multilib blacklist
        self.seen_pkgs = {}  # list the packages we've already seen so we can check all deps only once
        self.multilib_methods = self.config.get('pungi', 'multilib').split(" ")

        # greedy methods:
        #  * none: only best match package
        #  * all: all packages matching a provide
        #  * build: best match package + all other packages from the same SRPM having the same provide
        self.greedy_method = self.config.get('pungi', 'greedy')
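
        # e.g. with greedy "all", a dependency on a virtual provide pulls in
        # every matching provider; with "none" only the depsolver's best
        # match is added (behaviour as described above, example illustrative).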

        self.lookaside_repos = self.config.get('pungi', 'lookaside_repos').split(" ")
        self.sourcerpm_arch_map = {}  # {sourcerpm: set[arches]} - used for gathering debuginfo

        # package object lists
        self.po_list = set()
        self.srpm_po_list = set()
        self.debuginfo_po_list = set()

        # get_srpm_po() cache
        self.sourcerpm_srpmpo_map = {}

        # flags
        self.input_packages = set()  # packages specified in %packages kickstart section including those defined via comps groups
        self.comps_packages = set()  # packages specified in %packages kickstart section *indirectly* via comps groups
        self.prepopulate_packages = set()  # packages specified in %prepopulate kickstart section
        self.fulltree_packages = set()
        self.langpack_packages = set()
        self.multilib_packages = set()

        # already processed packages
        self.completed_add_srpms = set()  # srpms
        self.completed_debuginfo = set()  # rpms
        self.completed_depsolve = set()  # rpms
        self.completed_langpacks = set()  # rpms
        self.completed_multilib = set()  # rpms
        self.completed_fulltree = set()  # srpms
        self.completed_selfhosting = set()  # srpms
        self.completed_greedy_build = set()  # po.sourcerpm

        self.is_fulltree = self.config.getboolean("pungi", "fulltree")
        self.is_selfhosting = self.config.getboolean("pungi", "selfhosting")
        self.is_sources = not self.config.getboolean("pungi", "nosource")
        self.is_debuginfo = not self.config.getboolean("pungi", "nodebuginfo")
        self.is_resolve_deps = self.config.getboolean("pungi", "resolve_deps")
        self.is_nomacboot = self.config.getboolean("pungi", "nomacboot")

        self.fulltree_excludes = set(self.ksparser.handler.fulltree_excludes)

        # rootfs image size
        self.rootfs_size = self.config.get('pungi', 'rootfs_size')

    def _add_yum_repo(self, name, url, mirrorlist=False, groups=True,
                      cost=1000, includepkgs=None, excludepkgs=None,
                      proxy=None):
        """This function adds a repo to the yum object.
        name: Name of the repo
        url: Full url to the repo
        mirrorlist: Bool for whether or not url is a mirrorlist
        groups: Bool for whether or not to use groupdata from this repo
        cost: an optional int representing the cost of a repo
        includepkgs: An optional list of includes to use
        excludepkgs: An optional list of excludes to use
        proxy: An optional proxy to use
        """
        includepkgs = includepkgs or []
        excludepkgs = excludepkgs or []

        self.logger.info('Adding repo %s' % name)
        thisrepo = yum.yumRepo.YumRepository(name)
        thisrepo.name = name
        # add excludes and such here when pykickstart gets them
        if mirrorlist:
            thisrepo.mirrorlist = yum.parser.varReplace(url,
                                                        self.ayum.conf.yumvar)
            self.mirrorlists.append(thisrepo.mirrorlist)
            self.logger.info('Mirrorlist for repo %s is %s' %
                             (thisrepo.name, thisrepo.mirrorlist))
        else:
            thisrepo.baseurl = yum.parser.varReplace(url,
                                                     self.ayum.conf.yumvar)
            self.repos.extend(thisrepo.baseurl)
            self.logger.info('URL for repo %s is %s' %
                             (thisrepo.name, thisrepo.baseurl))
        thisrepo.basecachedir = self.ayum.conf.cachedir
        thisrepo.enablegroups = groups
        # This is until yum uses this failover by default
        thisrepo.failovermethod = 'priority'
        thisrepo.exclude = excludepkgs
        thisrepo.includepkgs = includepkgs
        thisrepo.cost = cost
        # Yum doesn't like proxy being None
        if proxy:
            thisrepo.proxy = proxy
        self.ayum.repos.add(thisrepo)
        self.ayum.repos.enableRepo(thisrepo.id)
        self.ayum._getRepos(thisrepo=thisrepo.id, doSetup=True)
        # Set the repo callback.
        self.ayum.repos.setProgressBar(CallBack(logger=self.logger))
        self.ayum.repos.callback = CallBack(logger=self.logger)
        thisrepo.metadata_expire = 0
        thisrepo.mirrorlist_expire = 0
        if os.path.exists(os.path.join(thisrepo.cachedir, 'repomd.xml')):
            os.remove(os.path.join(thisrepo.cachedir, 'repomd.xml'))

    @yumlocked
    def _inityum(self):
        """Initialize the yum object. Only needed for certain actions."""

        # Create a yum object to use
        self.repos = []
        self.mirrorlists = []
        self.ayum = PungiYum(self.config)
        self.ayum.doLoggingSetup(6, 6)
        yumconf = yum.config.YumConf()
        yumconf.debuglevel = 6
        yumconf.errorlevel = 6
        yumconf.cachedir = self.config.get('pungi', 'cachedir')
        yumconf.persistdir = "/var/lib/yum"  # keep at default, gets appended to installroot
        yumconf.installroot = os.path.join(self.workdir, 'yumroot')
        yumconf.uid = os.geteuid()
        yumconf.cache = 0
        yumconf.failovermethod = 'priority'
        yumconf.deltarpm = 0
        yumvars = yum.config._getEnvVar()
        yumvars['releasever'] = self.config.get('pungi', 'version')
        yumvars['basearch'] = yum.rpmUtils.arch.getBaseArch(myarch=self.tree_arch)
        yumconf.yumvar = yumvars
        self.ayum._conf = yumconf
        # I have no idea why this fixes a traceback, but James says it does.
        del self.ayum.prerepoconf
        self.ayum.repos.setCacheDir(self.ayum.conf.cachedir)

        self.ayum.arch.setup_arch(self.yum_arch)

        # deal with our repos
        try:
            self.ksparser.handler.repo.methodToRepo()
        except:
            pass

        for repo in self.ksparser.handler.repo.repoList:
            if repo.mirrorlist:
                # The not bool() thing is because pykickstart is yes/no on
                # whether to ignore groups, but yum is a yes/no on whether to
                # include groups. Awkward.
                self._add_yum_repo(repo.name, repo.mirrorlist,
                                   mirrorlist=True,
                                   groups=not bool(repo.ignoregroups),
                                   cost=repo.cost,
                                   includepkgs=repo.includepkgs,
                                   excludepkgs=repo.excludepkgs,
                                   proxy=repo.proxy)
            else:
                self._add_yum_repo(repo.name, repo.baseurl,
                                   mirrorlist=False,
                                   groups=not bool(repo.ignoregroups),
                                   cost=repo.cost,
                                   includepkgs=repo.includepkgs,
                                   excludepkgs=repo.excludepkgs,
                                   proxy=repo.proxy)

        self.logger.info('Getting sacks for arches %s' % self.valid_arches)
        self.ayum._getSacks(archlist=self.valid_arches)

    def _filtersrcdebug(self, po):
        """Filter out package objects that are of 'src' arch."""

        if po.arch == 'src' or pungi.util.pkg_is_debug(po):
            return False

        return True

    def add_package(self, po, msg=None):
        if not is_package(po):
            raise ValueError("Not a binary package: %s" % po)
        if msg:
            self.logger.info(msg)
        if po not in self.po_list:
            self.po_list.add(po)
        self.ayum.install(po)
        self.sourcerpm_arch_map.setdefault(po.sourcerpm, set()).add(po.arch)

    def add_debuginfo(self, po, msg=None):
        if not pungi.util.pkg_is_debug(po):
            raise ValueError("Not a debuginfo package: %s" % po)
        if msg:
            self.logger.info(msg)
        if po not in self.debuginfo_po_list:
            self.debuginfo_po_list.add(po)

    def add_source(self, po, msg=None):
        if not is_source(po):
            raise ValueError("Not a source package: %s" % po)
        if msg:
            self.logger.info(msg)
        if po not in self.srpm_po_list:
            self.srpm_po_list.add(po)

    def verifyCachePkg(self, po, path):  # Stolen from yum
        """check the package checksum vs the cache
           return True if pkg is good, False if not"""

        (csum_type, csum) = po.returnIdSum()

        try:
            filesum = yum.misc.checksum(csum_type, path)
        except yum.Errors.MiscError:
            return False

        if filesum != csum:
            return False

        return True

    def expand_multilib_blacklist(self):
        multilib_blacklist = self.ksparser.handler.multilib_blacklist
        exactmatched, matched, unmatched = yum.packages.parsePackages(
            self.all_pkgs, multilib_blacklist, casematch=1, pkgdict=self.pkg_refs.copy())

        for i in sorted(unmatched):
            self.logger.warning("Unmatched multilib blacklist pattern: %s" % i)

        for pkg in exactmatched + matched:
            if pkg.arch == "src":
                continue
            if pkg.arch not in self.valid_multilib_arches:
                continue

            found = None
            for pattern in multilib_blacklist:
                if fnmatch(pkg.name, pattern):
                    found = pattern
                    break

            if found:
                if pkg not in self.multilib_blacklist:
                    self.logger.info("Excluding %s.%s (multilib-blacklist pattern: %s)"
                                     % (pkg.name, pkg.arch, found))
                self.multilib_blacklist.add(pkg)

    def expand_excluded_list(self):
        excluded_list = []
        multilib_excluded_list = []
        source_excluded_list = []

        for pattern in self.ksparser.handler.packages.excludedList:
            if pattern.endswith(".+"):
                multilib_excluded_list.append(pattern[:-2])
            elif pattern.endswith(".src"):
                source_excluded_list.append(pattern[:-4])
            else:
                excluded_list.append(pattern)
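
        # e.g. an excluded "glibc.+" becomes a multilib exclude for "glibc",
        # "foo.src" a source exclude for "foo", and a plain "bar*" stays a
        # native exclude (names illustrative).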

        # native packages
        exactmatched, matched, unmatched = yum.packages.parsePackages(
            self.all_pkgs, excluded_list, casematch=1, pkgdict=self.pkg_refs.copy())

        for i in sorted(unmatched):
            self.logger.warning("Unmatched exclude: %s" % i)

        for pkg in exactmatched + matched:
            if pkg.arch == "src":
                continue
            if pkg.repoid in self.lookaside_repos:
                # Don't exclude packages from lookaside
                continue

            found = None
            for pattern in excluded_list:
                if fnmatch(pkg.name, pattern):
                    found = pattern
                    break

            if found:
                if pkg not in self.excluded_packages:
                    self.logger.info("Excluding %s.%s (pattern: %s)"
                                     % (pkg.name, pkg.arch, found))
                self.excluded_packages.add(pkg)

        # multilib packages
        exactmatched, matched, unmatched = yum.packages.parsePackages(
            self.all_pkgs, multilib_excluded_list, casematch=1, pkgdict=self.pkg_refs.copy())

        for i in sorted(unmatched):
            self.logger.warning("Unmatched multilib exclude: %s.+" % i)

        for pkg in exactmatched + matched:
            if pkg.arch == "src":
                continue
            if pkg.arch not in self.valid_multilib_arches:
                continue
            if pkg.repoid in self.lookaside_repos:
                # Don't exclude packages from lookaside
                continue

            found = None
            for pattern in multilib_excluded_list:
                if fnmatch(pkg.name, pattern):
                    found = pattern
                    break

            if found:
                if pkg not in self.excluded_packages:
                    self.logger.info("Excluding %s.%s (pattern: %s.+)"
                                     % (pkg.name, pkg.arch, found))
                self.excluded_packages.add(pkg)

        # source packages
        exactmatched, matched, unmatched = yum.packages.parsePackages(
            self.all_pkgs, source_excluded_list, casematch=1, pkgdict=self.pkg_refs.copy())

        for i in sorted(unmatched):
            self.logger.warning("Unmatched source exclude: %s.src" % i)

        for pkg in exactmatched + matched:
            if pkg.arch != "src":
                continue

            found = None
            for pattern in source_excluded_list:
                if fnmatch(pkg.name, pattern):
                    found = pattern
                    break

            if found:
                if pkg not in self.excluded_packages:
                    self.logger.info("Excluding %s.%s (pattern: %s.src)"
                                     % (pkg.name, pkg.arch, found))
                self.excluded_packages.add(pkg)

    def excludePackages(self, pkg_sack):
        """exclude packages according to config file"""
        if not pkg_sack:
            return pkg_sack

        result = []
        for pkg in pkg_sack:
            if pkg in self.multilib_blacklist:
                continue
            if pkg in self.excluded_packages:
                continue
            result.append(pkg)

        return result

    def get_package_deps(self, po):
        """Add the dependencies for a given package to the
           transaction info"""
        added = set()
        if po.repoid in self.lookaside_repos:
            # Don't resolve deps for stuff in lookaside.
            return added
        if po in self.completed_depsolve:
            return added
        self.completed_depsolve.add(po)

        self.logger.info('Checking deps of %s.%s' % (po.name, po.arch))

        reqs = po.requires
        provs = po.provides

        for req in reqs:
            if req in self.resolved_deps:
                continue
            r, f, v = req
            if r.startswith('rpmlib(') or r.startswith('config('):
                continue
            if req in provs:
                continue

            try:
                deps = self.ayum.whatProvides(r, f, v).returnPackages()
                deps = self.excludePackages(deps)
                if not deps:
                    self.logger.warn(
                        "Unresolvable dependency %s in %s.%s"
                        % (Req(req), po.name, po.arch)
                    )
                    continue

                if self.greedy_method == "all":
                    deps = yum.packageSack.ListPackageSack(deps).returnNewestByNameArch()
                else:
                    found = False
                    for dep in deps:
                        if dep in self.po_list:
                            # HACK: there can be builds in the input list on which we want to apply the "build" greedy rules
                            if self.greedy_method == "build" and dep.sourcerpm not in self.completed_greedy_build:
                                break
                            found = True
                            break
                    if found:
                        deps = []
                    else:
                        all_deps = deps
                        deps = [self.ayum._bestPackageFromList(all_deps)]
                        if self.greedy_method == "build":
                            # handle "build" greedy method
                            if deps:
                                build_po = deps[0]
                                if is_package(build_po):
                                    if build_po.arch != "noarch" and build_po.arch not in self.valid_multilib_arches:
                                        all_deps = [i for i in all_deps if i.arch not in self.valid_multilib_arches]
                                    for dep in all_deps:
                                        if dep != build_po and dep.sourcerpm == build_po.sourcerpm:
                                            deps.append(dep)
                                            self.completed_greedy_build.add(dep.sourcerpm)

                for dep in deps:
                    if dep not in added:
                        msg = 'Added %s.%s (repo: %s) for %s.%s (Requires: %s)' % (
                            dep.name, dep.arch, dep.repoid, po.name, po.arch, Req(req))
                        self.add_package(dep, msg)
                        added.add(dep)

            except (yum.Errors.InstallError, yum.Errors.YumBaseError) as ex:
                self.logger.warn("Unresolvable dependency %s in %s.%s (repo: %s)" % (r, po.name, po.arch, po.repoid))
                continue
            self.resolved_deps[req] = None

        for add in sorted(added):
            self.get_package_deps(add)
        return added

    def add_langpacks(self, po_list=None):
        po_list = po_list or self.po_list
        added = set()

        for po in sorted(po_list):
            if po in self.completed_langpacks:
                continue

            # get all langpacks matching the package name
            langpacks = [i for i in self.langpacks if i["name"] == po.name]
            if not langpacks:
                continue

            self.completed_langpacks.add(po)

            for langpack in langpacks:
                pattern = langpack["install"] % "*"  # replace '%s' with '*'
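                # Illustrative comps entry: {"name": "man-pages", "install":
                # "man-pages-%s"} yields the glob "man-pages-*" here.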
                exactmatched, matched, unmatched = yum.packages.parsePackages(self.all_pkgs, [pattern], casematch=1, pkgdict=self.pkg_refs.copy())
                matches = filter(self._filtersrcdebug, exactmatched + matched)
                matches = [i for i in matches if not i.name.endswith("-devel") and not i.name.endswith("-static") and i.name != "man-pages-overrides"]
                matches = [i for i in matches if fnmatch(i.name, pattern)]

                packages_by_name = {}
                for i in matches:
                    packages_by_name.setdefault(i.name, []).append(i)

                for i, pkg_sack in packages_by_name.iteritems():
                    pkg_sack = self.excludePackages(pkg_sack)
                    if not pkg_sack:
                        continue
                    match = self.ayum._bestPackageFromList(pkg_sack)
                    msg = 'Added langpack %s.%s (repo: %s) for package %s (pattern: %s)' % (match.name, match.arch, match.repoid, po.name, pattern)
                    self.add_package(match, msg)
                    self.completed_langpacks.add(match)  # assuming langpack doesn't have langpacks
                    added.add(match)

        return added

    def add_multilib(self, po_list=None):
        po_list = po_list or self.po_list
        added = set()

        if not self.multilib_methods:
            return added

        for po in sorted(po_list):
            if po in self.completed_multilib:
                continue

            if po.arch in ("noarch", "src", "nosrc"):
                continue

            if po.arch in self.valid_multilib_arches:
                continue

            self.completed_multilib.add(po)

            matches = self.ayum.pkgSack.searchNevra(name=po.name, ver=po.version, rel=po.release)
            matches = [i for i in matches if i.arch in self.valid_multilib_arches]
            if not matches:
                continue
            matches = self.excludePackages(matches)
            match = self.ayum._bestPackageFromList(matches)
            if not match:
                continue

            found = False
            for pattern in self.ksparser.handler.multilib_whitelist:
                if fnmatch(po.name, pattern):
                    found = True
                    break
            if found:
                msg = "Added multilib package %s.%s (repo: %s) for package %s.%s (method: %s)" % (match.name, match.arch, match.repoid, po.name, po.arch, "multilib-whitelist")
                self.add_package(match, msg)
                self.completed_multilib.add(match)
                added.add(match)
                continue

            method = multilib.po_is_multilib(po, self.multilib_methods)
            if not method:
                continue
            msg = "Added multilib package %s.%s (repo: %s) for package %s.%s (method: %s)" % (match.name, match.arch, match.repoid, po.name, po.arch, method)
            self.add_package(match, msg)
            self.completed_multilib.add(match)
            added.add(match)
        return added

    def getPackagesFromGroup(self, group):
        """Get a list of package names from a ksparser group object

           Returns a list of package names"""

        packages = []

        # Check if we have the group
        if not self.ayum.comps.has_group(group.name):
            self.logger.error("Group %s not found in comps!" % group)
            return packages

        # Get the group object to work with
        groupobj = self.ayum.comps.return_group(group.name)

        # Add the mandatory packages
        packages.extend(groupobj.mandatory_packages.keys())

        # Add the default packages unless we don't want them
        if group.include == 1:
            packages.extend(groupobj.default_packages.keys())

        # Add the optional packages if we want them
        if group.include == 2:
            packages.extend(groupobj.default_packages.keys())
            packages.extend(groupobj.optional_packages.keys())

        # Deal with conditional packages
        # Populate a dict with the name of the required package and value
        # of the package objects it would bring in. To be used later if
        # we match the conditional.
        for condreq, cond in groupobj.conditional_packages.iteritems():
            matches = self.ayum.pkgSack.searchNevra(name=condreq)
            if matches:
                if self.greedy_method != "all":
                    # works for both "none" and "build" greedy methods
                    matches = [self.ayum._bestPackageFromList(matches)]
                self.ayum.tsInfo.conditionals.setdefault(cond, []).extend(matches)

        return packages

    def _addDefaultGroups(self, excludeGroups=None):
        """Cycle through the groups and return a list of the ones that are
           default."""
        excludeGroups = excludeGroups or []

        # This is mostly stolen from anaconda.
        groups = map(lambda x: x.groupid,
                     filter(lambda x: x.default, self.ayum.comps.groups))

        groups = [x for x in groups if x not in excludeGroups]

        self.logger.debug('Add default groups %s' % groups)
        return groups

    def get_langpacks(self):
        try:
            self.langpacks = list(self.ayum.comps.langpacks)
        except AttributeError:
            # old yum
            self.logger.warning("Could not get langpacks via yum.comps. You may need to update yum.")
            self.langpacks = []
        except yum.Errors.GroupsError:
            # no groups or no comps at all
            self.logger.warning("Could not get langpacks due to missing comps in repodata or --ignoregroups=true option.")
            self.langpacks = []

    def getPackageObjects(self):
        """Cycle through the list of packages and get package object matches."""

        searchlist = []  # The list of package names/globs to search for
        excludeGroups = []  # A list of groups for removal defined in the ks file

        # precompute pkgs and pkg_refs to speed things up
        self.all_pkgs = list(set(self.ayum.pkgSack.returnPackages()))
        self.pkg_refs = yum.packages.buildPkgRefDict(self.all_pkgs, casematch=True)
        self.expand_excluded_list()
        self.expand_multilib_blacklist()
        self.all_pkgs = self.excludePackages(self.all_pkgs)

        lookaside_nvrs = set()
        for po in self.all_pkgs:
            if po.repoid in self.lookaside_repos:
                lookaside_nvrs.add(po.nvra)
        all_pkgs = []  # building a new list is cheaper than deleting from existing
        for po in sorted(self.all_pkgs):
            if po.repoid not in self.lookaside_repos and po.nvra in lookaside_nvrs:
                self.logger.info("Removed %s (repo: %s), because it's also in a lookaside repo"
                                 % (po, po.repoid))
                self.excluded_packages.add(po)
            else:
                all_pkgs.append(po)
        self.all_pkgs = all_pkgs

        self.get_langpacks()

        # First remove the excludes
        self.ayum.excludePackages()

        # Get the groups set for removal
        for group in self.ksparser.handler.packages.excludedGroupList:
            excludeGroups.append(str(group)[1:])

        if "core" in [i.groupid for i in self.ayum.comps.groups]:
            if "core" not in [i.name for i in self.ksparser.handler.packages.groupList]:
                self.logger.warning("The @core group is no longer added by default; Please add @core to the kickstart if you want it in.")

        if "base" in [i.groupid for i in self.ayum.comps.groups]:
            if "base" not in [i.name for i in self.ksparser.handler.packages.groupList]:
                if self.ksparser.handler.packages.addBase:
                    self.logger.warning("The --nobase kickstart option is no longer supported; Please add @base to the kickstart if you want it in.")

        # Check to see if we want all the defaults
        if self.ksparser.handler.packages.default:
            for group in self._addDefaultGroups(excludeGroups):
                self.ksparser.handler.packages.add(['@%s' % group])

        # Get a list of packages from groups
        comps_package_names = set()
        for group in self.ksparser.handler.packages.groupList:
            comps_package_names.update(self.getPackagesFromGroup(group))
        searchlist.extend(sorted(comps_package_names))

        # Add packages
        searchlist.extend(self.ksparser.handler.packages.packageList)
        input_packages = searchlist[:]

        # Add prepopulate packages
        prepopulate_packages = self.ksparser.handler.prepopulate
        searchlist.extend(prepopulate_packages)

        # Make the search list unique
        searchlist = yum.misc.unique(searchlist)

        for name in searchlist:
            pattern = name
            multilib = False
            orig_name = name
            if name.endswith(".+"):
                name = name[:-2]
                multilib = True

            if self.greedy_method == "all" and name == "system-release":
                # HACK: handles a special case, when system-release virtual provide is specified in the greedy mode
                matches = self.ayum.whatProvides(name, None, None).returnPackages()
            else:
                exactmatched, matched, unmatched = yum.packages.parsePackages(self.all_pkgs, [name], casematch=1, pkgdict=self.pkg_refs.copy())
                matches = exactmatched + matched

            matches = filter(self._filtersrcdebug, matches)

            if multilib and self.greedy_method != "all":
                matches = [po for po in matches if po.arch in self.valid_multilib_arches]

            if not matches:
                self.logger.warn('Could not find a match for %s in any configured repo' % pattern)
                continue

            packages_by_name = {}
            for po in matches:
                packages_by_name.setdefault(po.name, []).append(po)

            for name, packages in packages_by_name.iteritems():
                packages = self.excludePackages(packages or [])
                if not packages:
                    continue
                if self.greedy_method == "all":
                    packages = yum.packageSack.ListPackageSack(packages).returnNewestByNameArch()
                else:
                    # works for both "none" and "build" greedy methods
                    packages = [self.ayum._bestPackageFromList(packages)]

                if orig_name in input_packages:
                    self.input_packages.update(packages)
                if name in comps_package_names:
                    self.comps_packages.update(packages)

                for po in packages:
                    msg = 'Found %s.%s' % (po.name, po.arch)
                    self.add_package(po, msg)
                    name_arch = "%s.%s" % (po.name, po.arch)
                    if name_arch in prepopulate_packages:
                        self.prepopulate_packages.add(po)

        self.logger.info('Finished gathering package objects.')

    def gather(self):

        # get package objects according to the input list
        self.getPackageObjects()
        if self.is_sources:
            self.createSourceHashes()

        pass_num = 0
        added = set()
        while 1:
            if pass_num > 0 and not added:
                break
            added = set()
            pass_num += 1
            self.logger.info("Pass #%s" % pass_num)

            if self.is_resolve_deps:
                # get conditional deps (defined in comps)
                for txmbr in self.ayum.tsInfo:
                    if not txmbr.po in self.po_list:
                        if not is_package(txmbr.po):
                            # we don't want sources which can be pulled in, because 'src' arch is part of self.valid_arches
                            continue
                        if not txmbr.isDep:
                            continue
                        self.add_package(txmbr.po)

            # resolve deps
            if self.is_resolve_deps:
                for po in sorted(self.po_list):
                    added.update(self.get_package_deps(po))

            if self.is_sources:
                added_srpms = self.add_srpms()
                added.update(added_srpms)

                if self.is_selfhosting:
                    for srpm_po in sorted(added_srpms):
                        added.update(self.get_package_deps(srpm_po))

                if self.is_fulltree:
                    new = self.add_fulltree()
                    self.fulltree_packages.update(new)
                    self.fulltree_packages.update([self.sourcerpm_srpmpo_map[i.sourcerpm] for i in new])
                    added.update(new)

            if added:
                continue

            # add langpacks
            new = self.add_langpacks(self.po_list)
            self.langpack_packages.update(new)
            if self.is_sources:
                self.langpack_packages.update([self.sourcerpm_srpmpo_map[i.sourcerpm] for i in new])
            added.update(new)
            if added:
                continue

            # add multilib packages
            new = self.add_multilib(self.po_list)
            self.multilib_packages.update(new)
            self.multilib_packages.update([self.sourcerpm_srpmpo_map[i.sourcerpm] for i in new])
            added.update(new)
            if added:
                continue

    def get_srpm_po(self, po):
        """Given a package object, get a package object for the corresponding source rpm."""

        # return srpm_po from cache if available
        srpm_po = self.sourcerpm_srpmpo_map.get(po.sourcerpm, None)
        if srpm_po is not None:
            return srpm_po

        # arch can be "src" or "nosrc"
        nvr, arch, _ = po.sourcerpm.rsplit(".", 2)
        name, ver, rel = nvr.rsplit('-', 2)
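        # e.g. a sourcerpm of "bash-4.3.42-1.fc23.src.rpm" splits into
        # nvr "bash-4.3.42-1.fc23" and arch "src", and the nvr then into
        # name "bash", ver "4.3.42", rel "1.fc23" (values illustrative).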

        # ... but even "nosrc" packages are stored as "src" in repodata
        srpm_po_list = self.ayum.pkgSack.searchNevra(name=name, ver=ver, rel=rel, arch="src")
        srpm_po_list = self.excludePackages(srpm_po_list)
        try:
            srpm_po = srpm_po_list[0]
        except IndexError:
            self.logger.warning("Cannot find a source rpm for %s" % po.sourcerpm)
            srpm_po = None
        self.sourcerpm_srpmpo_map[po.sourcerpm] = srpm_po
        return srpm_po

    def createSourceHashes(self):
        """Create two dicts - one that maps binary POs to source POs, and
           one that maps a single source PO to all binary POs it produces.
           Requires yum still configured."""
        self.src_by_bin = {}
        self.bin_by_src = {}
        self.logger.info("Generating source <-> binary package mappings")
        for po in self.all_pkgs:
            if is_source(po):
                continue
            srpmpo = self.get_srpm_po(po)

            self.src_by_bin[po] = srpmpo
            self.bin_by_src.setdefault(srpmpo, []).append(po)

    def add_srpms(self, po_list=None):
        """Cycle through the list of package objects and
           find the sourcerpm for them. Requires yum still
           configured and a list of package objects"""

        srpms = set()
        po_list = po_list or self.po_list
        for po in sorted(po_list):
            try:
                srpm_po = self.sourcerpm_srpmpo_map[po.sourcerpm]
            except KeyError:
                self.logger.error("Cannot get source RPM '%s' for %s" % (po.sourcerpm, po.nvra))
                srpm_po = None

            if srpm_po is None:
                continue

            # flags
            if po in self.input_packages:
                self.input_packages.add(srpm_po)
            if po in self.fulltree_packages:
                self.fulltree_packages.add(srpm_po)
            if po in self.langpack_packages:
                self.langpack_packages.add(srpm_po)
            if po in self.multilib_packages:
                self.multilib_packages.add(srpm_po)

            if srpm_po in self.completed_add_srpms:
                continue

            msg = "Added source package %s.%s (repo: %s)" % (srpm_po.name, srpm_po.arch, srpm_po.repoid)
            self.add_source(srpm_po, msg)

            self.completed_add_srpms.add(srpm_po)
            srpms.add(srpm_po)
        return srpms

    def add_fulltree(self, srpm_po_list=None):
        """Cycle through all package objects, and add any
           that correspond to a source rpm that we are including.
           Requires yum still configured and a list of package
           objects."""

        self.logger.info("Completing package set")

        srpm_po_list = srpm_po_list or self.srpm_po_list
        srpms = []
        for srpm_po in srpm_po_list:
            if srpm_po in self.completed_fulltree:
                continue
            if srpm_po.name not in self.fulltree_excludes:
                srpms.append(srpm_po)
            self.completed_fulltree.add(srpm_po)

        added = set()
        for srpm_po in srpms:
            if srpm_po.repoid in self.lookaside_repos:
                # Don't run fulltree on packages in lookaside
                continue
            include_native = False
            include_multilib = False
            has_native = False
            has_multilib = False

            for po in self.excludePackages(self.bin_by_src[srpm_po]):
                if not is_package(po):
                    continue
                if po.arch == "noarch":
                    continue
                if po not in self.po_list:
                    # process only already included packages
                    if po.arch in self.valid_multilib_arches:
                        has_multilib = True
                    elif po.arch in self.valid_native_arches:
                        has_native = True
                    continue
                if po.arch in self.valid_multilib_arches and (po in self.input_packages or self.greedy_method == "all"):
                    include_multilib = True
                elif po.arch in self.valid_native_arches:
                    include_native = True

            # XXX: this is very fragile!
            # Do not make any changes unless you really know what you're doing!
            if not include_native:
                # if there's no native package already pulled in...
                if has_native and not include_multilib:
                    # include all native packages, but only if we're not pulling multilib already
                    # SCENARIO: a noarch package was already pulled in and there are x86_64 and i686 packages -> we want x86_64 in to complete the package set
                    include_native = True
                elif has_multilib:
                    # SCENARIO: a noarch package was already pulled in and there are no x86_64 packages; we want i686 in to complete the package set
                    include_multilib = True

            for po in self.excludePackages(self.bin_by_src[srpm_po]):
                if not is_package(po):
                    continue
                if po in self.po_list:
                    continue
                if po.arch != "noarch":
                    if po.arch in self.valid_multilib_arches:
                        if not include_multilib:
                            continue
                    if po.arch in self.valid_native_arches:
                        if not include_native:
                            continue
                msg = "Added %s.%s (repo: %s) to complete package set" % (po.name, po.arch, po.repoid)
                self.add_package(po, msg)
                added.add(po)  # populate the returned set so gather() re-runs depsolving on these
        return added

    def getDebuginfoList(self):
        """Cycle through the list of package objects and find
           debuginfo rpms for them. Requires yum still
           configured and a list of package objects"""

        added = set()
        for po in self.all_pkgs:
            if not pungi.util.pkg_is_debug(po):
                continue

            if po.sourcerpm not in self.sourcerpm_arch_map:
                # TODO: print a warning / throw an error
                continue
            if po.arch != 'noarch' and not (set(self.compatible_arches[po.arch]) &
                                            set(self.sourcerpm_arch_map[po.sourcerpm]) -
                                            set(["noarch"])):
                # skip all incompatible arches unless it's a noarch debuginfo
                # this pulls i386 debuginfo for a i686 package for example
                continue
            msg = 'Added debuginfo %s.%s (repo: %s)' % (po.name, po.arch, po.repoid)
            self.add_debuginfo(po, msg)

            # flags
            try:
                srpm_po = self.sourcerpm_srpmpo_map[po.sourcerpm]
            except:
                self.logger.warning('Failed to find source for %s', po.sourcerpm)
                srpm_po = None
            if srpm_po in self.input_packages:
                self.input_packages.add(po)
            if srpm_po in self.fulltree_packages:
                self.fulltree_packages.add(po)
            if srpm_po in self.langpack_packages:
                self.langpack_packages.add(po)
            if srpm_po in self.multilib_packages:
                self.multilib_packages.add(po)

            added.add(po)
        return added
2008-06-12 13:00:43 +00:00
|
|
|
def _downloadPackageList(self, polist, relpkgdir):
|
|
|
|
"""Cycle through the list of package objects and
|
|
|
|
download them from their respective repos."""
|
|
|
|
|
2017-06-07 13:07:07 +00:00
|
|
|
for pkg in sorted(polist):
|
|
|
|
repo = self.ayum.repos.getRepo(pkg.repoid)
|
|
|
|
self.logger.info("Downloading %s.%s from %s",
|
|
|
|
pkg.name, pkg.arch, repo.baseurl or repo.mirrorlist)
|
2008-06-12 13:00:43 +00:00
|
|
|
|
2008-12-04 23:44:34 +00:00
|
|
|
pkgdir = os.path.join(self.config.get('pungi', 'destdir'),
|
|
|
|
self.config.get('pungi', 'version'),
|
2015-02-28 05:13:47 +00:00
|
|
|
self.config.get('pungi', 'variant'),
|
2008-06-12 13:00:43 +00:00
|
|
|
relpkgdir)
|
|
|
|
|
|
|
|
# Ensure the pkgdir exists, force if requested, and make sure we clean it out
|
|
|
|
if relpkgdir.endswith('SRPMS'):
|
|
|
|
# Since we share source dirs with other arches don't clean, but do allow us to use it
|
2015-02-05 15:56:24 +00:00
|
|
|
pungi.util._ensuredir(pkgdir, self.logger, force=True, clean=False)
|
2008-06-12 13:00:43 +00:00
|
|
|
else:
|
2015-02-05 15:56:24 +00:00
|
|
|
pungi.util._ensuredir(pkgdir, self.logger, force=self.config.getboolean('pungi', 'force'), clean=True)
|
2008-06-12 13:00:43 +00:00
|
|
|
|
|
|
|
probs = self.ayum.downloadPkgs(polist)
|
|
|
|
|
|
|
|
if len(probs.keys()) > 0:
|
|
|
|
self.logger.error("Errors were encountered while downloading packages.")
|
|
|
|
for key in probs.keys():
|
|
|
|
errors = yum.misc.unique(probs[key])
|
|
|
|
for error in errors:
|
|
|
|
self.logger.error("%s: %s" % (key, error))
|
|
|
|
sys.exit(1)
|
|
|
|
|
|
|
|
for po in polist:
|
|
|
|
basename = os.path.basename(po.relativepath)
|
|
|
|
|
|
|
|
local = po.localPkg()
|
2012-04-16 19:54:24 +00:00
|
|
|
if self.config.getboolean('pungi', 'nohash'):
|
2012-02-09 15:12:55 +00:00
|
|
|
target = os.path.join(pkgdir, basename)
|
|
|
|
else:
|
|
|
|
target = os.path.join(pkgdir, po.name[0].lower(), basename)
|
2012-04-16 19:54:24 +00:00
|
|
|
# Make sure we have the hashed dir available to link into we only want dirs there to corrospond to packages
|
|
|
|
# that we are including so we can not just do A-Z 0-9
|
2015-02-05 15:56:24 +00:00
|
|
|
pungi.util._ensuredir(os.path.join(pkgdir, po.name[0].lower()), self.logger, force=True, clean=False)
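                # For example (hypothetical package): bash-4.3.30-2.fc21.x86_64.rpm
                # is linked in as <pkgdir>/b/bash-4.3.30-2.fc21.x86_64.rpm here,
                # instead of <pkgdir>/bash-4.3.30-2.fc21.x86_64.rpm as in the
                # nohash case above.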

            # Link downloaded package in (or link package from file repo)
            try:
                pungi.util._link(local, target, self.logger, force=True)
                continue
            except Exception:
                self.logger.error("Unable to link %s from the yum cache." % po.name)
                sys.exit(1)

        self.logger.info('Finished downloading packages.')

    @yumlocked
    def downloadPackages(self):
        """Download the package objects obtained in getPackageObjects()."""

        self._downloadPackageList(self.po_list,
                                  os.path.join(self.tree_arch,
                                               self.config.get('pungi', 'osdir'),
                                               self.config.get('pungi', 'product_path')))

    def makeCompsFile(self):
        """Gather any comps files we can from repos and merge them into one."""

        ourcompspath = os.path.join(self.workdir, '%s-%s-comps.xml' % (self.config.get('pungi', 'family'), self.config.get('pungi', 'version')))

        # Filter out things we don't include
        ourgroups = []
        for item in self.ksparser.handler.packages.groupList:
            g = self.ayum.comps.return_group(item.name)
            if g:
                ourgroups.append(g.groupid)
        allgroups = [g.groupid for g in self.ayum.comps.get_groups()]
        for group in allgroups:
            if group not in ourgroups and not self.ayum.comps.return_group(group).langonly:
                self.logger.info('Removing extra group %s from comps file' % (group,))
                del self.ayum.comps._groups[group]

        groups = [g.groupid for g in self.ayum.comps.get_groups()]
        envs = self.ayum.comps.get_environments()
        for env in envs:
            for group in env.groups:
                if group not in groups:
                    self.logger.info('Removing incomplete environment %s from comps file' % (env,))
                    del self.ayum.comps._environments[env.environmentid]
                    break

        ourcomps = open(ourcompspath, 'w')
        ourcomps.write(self.ayum.comps.xml())
        ourcomps.close()

        # Disable this until https://bugzilla.redhat.com/show_bug.cgi?id=442097 is fixed.
        # Run the xslt filter over our comps file
        #compsfilter = ['/usr/bin/xsltproc', '--novalid']
        #compsfilter.append('-o')
        #compsfilter.append(ourcompspath)
        #compsfilter.append('/usr/share/pungi/comps-cleanup.xsl')
        #compsfilter.append(ourcompspath)

        #pungi.util._doRunCommand(compsfilter, self.logger)

    @yumlocked
    def downloadSRPMs(self):
        """Cycle through the list of SRPMs, find the package objects
           for them, then download them."""

        # do the downloads
        self._downloadPackageList(self.srpm_po_list, os.path.join('source', 'SRPMS'))

    @yumlocked
    def downloadDebuginfo(self):
        """Cycle through the list of debuginfo RPMs and download them."""

        # do the downloads
        self._downloadPackageList(self.debuginfo_po_list, os.path.join(self.tree_arch, 'debug'))

    def _list_packages(self, po_list):
        """Cycle through the list of packages and return their paths."""
        result = []
        for po in po_list:
            if po.repoid in self.lookaside_repos:
                continue

            flags = []

            # input
            if po in self.input_packages:
                flags.append("input")

            # comps
            if po in self.comps_packages:
                flags.append("comps")

            # prepopulate
            if po in self.prepopulate_packages:
                flags.append("prepopulate")

            # langpack
            if po in self.langpack_packages:
                flags.append("langpack")

            # multilib
            if po in self.multilib_packages:
                flags.append("multilib")

            # fulltree
            if po in self.fulltree_packages:
                flags.append("fulltree")

            # fulltree-exclude
            if is_source(po):
                srpm_name = po.name
            else:
                srpm_name = po.sourcerpm.rsplit("-", 2)[0]
            if srpm_name in self.fulltree_excludes:
                flags.append("fulltree-exclude")

            result.append({
                "path": os.path.join(po.basepath or "", po.relativepath),
                "flags": sorted(flags),
            })
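            # Each appended entry looks like (hypothetical path):
            #   {"path": "Packages/b/bash-4.3.30-2.fc21.x86_64.rpm",
            #    "flags": ["comps", "input"]}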
        result.sort(key=lambda x: x["path"])
        return result

    def list_packages(self):
        """Cycle through the list of RPMs and return their paths."""
        return self._list_packages(self.po_list)

    def list_srpms(self):
        """Cycle through the list of SRPMs and return their paths."""
        return self._list_packages(self.srpm_po_list)

    def list_debuginfo(self):
        """Cycle through the list of debuginfo RPMs and return their paths."""
        return self._list_packages(self.debuginfo_po_list)

    def _size_packages(self, po_list):
        return sum(po.size for po in po_list
                   if po.repoid not in self.lookaside_repos)

    def size_packages(self):
        return self._size_packages(self.po_list)

    def size_srpms(self):
        return self._size_packages(self.srpm_po_list)

    def size_debuginfo(self):
        return self._size_packages(self.debuginfo_po_list)

    def writeinfo(self, line):
        """Append a line to the info file at self.infofile."""
        f = open(self.infofile, "a+")
        f.write(line.strip() + "\n")
        f.close()

    def mkrelative(self, subfile):
        """Return the relative path for 'subfile' underneath the version dir."""

        basedir = os.path.join(self.destdir, self.config.get('pungi', 'version'))
        if subfile.startswith(basedir):
            return subfile.replace(basedir + os.path.sep, '')
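    # Usage sketch (hypothetical layout): with destdir '/srv/pungi' and version
    # '21', mkrelative('/srv/pungi/21/Everything/x86_64/os') yields
    # 'Everything/x86_64/os'; paths outside basedir fall through and return None.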

    def _makeMetadata(self, path, cachedir, comps=False, repoview=False, repoviewtitle=False,
                      baseurl=False, output=False, basedir=False, update=True,
                      compress_type=None):
        """Create repodata and repoview."""

        # Define outputdir
        if output:
            outputdir = output
        else:
            outputdir = path

        # Define revision if SOURCE_DATE_EPOCH exists in env
        if 'SOURCE_DATE_EPOCH' in os.environ:
            revision = os.environ['SOURCE_DATE_EPOCH']
        else:
            revision = None

        createrepo_wrapper = CreaterepoWrapper(createrepo_c=True)
        createrepo = createrepo_wrapper.get_createrepo_cmd(directory=path, update=update, outputdir=outputdir,
                                                           unique_md_filenames=True, database=True, groupfile=comps,
                                                           basedir=basedir, baseurl=baseurl, revision=revision,
                                                           compress_type=compress_type)
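        # The assembled command comes out roughly as (exact flags are up to
        # CreaterepoWrapper): createrepo_c --update --unique-md-filenames
        # --database -g <comps> --revision <epoch> -o <outputdir> <path>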

        self.logger.info('Making repodata')
        pungi.util._doRunCommand(createrepo, self.logger)

        if repoview:
            # setup the repoview call
            repoview = ['/usr/bin/repoview']
            repoview.append('--quiet')

            repoview.append('--state-dir')
            repoview.append(os.path.join(cachedir, 'repoviewcache'))

            if repoviewtitle:
                repoview.append('--title')
                repoview.append(repoviewtitle)

            repoview.append(path)

            # run the command
            pungi.util._doRunCommand(repoview, self.logger)

    def doCreaterepo(self, comps=True):
        """Run createrepo to generate repodata in the tree."""
        compsfile = None
        if comps:
            compsfile = os.path.join(self.workdir, '%s-%s-comps.xml' % (self.config.get('pungi', 'family'), self.config.get('pungi', 'version')))

        # setup the cache dirs
        for target in ['createrepocache', 'repoviewcache']:
            pungi.util._ensuredir(os.path.join(self.config.get('pungi', 'cachedir'),
                                               target),
                                  self.logger,
                                  force=True)

        repoviewtitle = '%s %s - %s' % (self.config.get('pungi', 'family'),
                                        self.config.get('pungi', 'version'),
                                        self.tree_arch)

        cachedir = self.config.get('pungi', 'cachedir')
        compress_type = self.config.get('pungi', 'compress_type')

        # setup the createrepo call
        self._makeMetadata(self.topdir, cachedir, compsfile,
                           repoview=True, repoviewtitle=repoviewtitle,
                           compress_type=compress_type)

        # create repodata for debuginfo
        if self.config.getboolean('pungi', 'debuginfo'):
            path = os.path.join(self.archdir, 'debug')
            if not os.path.isdir(path):
                self.logger.debug("No debuginfo for %s" % self.tree_arch)
                return
            self._makeMetadata(path, cachedir, repoview=False,
                               compress_type=compress_type)

    def _shortenVolID(self):
        """Shorten the volume ID to make sure it's under 32 characters."""

        substitutions = {'Workstation': 'WS',
                         'Server': 'S',
                         'Cloud': 'C',
                         'Alpha': 'A',
                         'Beta': 'B',
                         'TC': 'T'}
        if self.config.get('pungi', 'variant'):
            name = '%s-%s' % (self.config.get('pungi', 'family'), self.config.get('pungi', 'variant'))
        else:
            name = self.config.get('pungi', 'family')
        version = self.config.get('pungi', 'version')
        arch = self.tree_arch

        for k, v in substitutions.iteritems():
            if k in name:
                name = name.replace(k, v)
            if k in version:
                version = version.replace(k, v)
        volid = "%s-%s-%s" % (name, version, arch)
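        # For example (hypothetical config): family 'Fedora', variant
        # 'Workstation', version '21_Beta', arch 'x86_64' gives
        # 'Fedora-WS-21_B-x86_64' (21 characters), within the 32-char limit.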
        if len(volid) > 32:
            raise RuntimeError("Volume ID %s is longer than 32 characters" % volid)
        else:
            return volid

    def doBuildinstall(self):
        """Run lorax on the tree."""

        cmd = ["lorax"]
        cmd.extend(["--workdir", self.workdir])
        cmd.extend(["--logfile", os.path.join(self.config.get('pungi', 'destdir'), 'logs/lorax-%s.log' % (self.config.get('pungi', 'arch')))])

        try:
            # Convert url method to a repo
            self.ksparser.handler.repo.methodToRepo()
        except Exception:
            pass

        for repo in self.ksparser.handler.repo.repoList:
            if repo.mirrorlist:
                # Substitute any yum variables (e.g. $releasever, $basearch)
                # before handing the URL to lorax.
                repo.mirrorlist = yum.parser.varReplace(repo.mirrorlist, self.ayum.conf.yumvar)
                cmd.extend(["--mirrorlist", repo.mirrorlist])
            else:
                repo.baseurl = yum.parser.varReplace(repo.baseurl, self.ayum.conf.yumvar)
                cmd.extend(["--source", repo.baseurl])

        # Add the repo in the destdir to our yum object
        cmd.extend(["--source", "file://%s" % self.topdir])
        cmd.extend(["--product", self.config.get('pungi', 'family')])
        cmd.extend(["--version", self.config.get('pungi', 'version')])
        cmd.extend(["--release", "%s %s" % (self.config.get('pungi', 'family'), self.config.get('pungi', 'version'))])
        if self.config.get('pungi', 'variant'):
            cmd.extend(["--variant", self.config.get('pungi', 'variant')])
        cmd.extend(["--bugurl", self.config.get('pungi', 'bugurl')])
        if self.config.getboolean('pungi', 'isfinal'):
            cmd.append("--isfinal")
        cmd.extend(["--volid", self._shortenVolID()])

        # On ppc64 we need to tell lorax to only use ppc64 packages so that
        # the media will run on all 64-bit ppc boxes.
        if self.tree_arch == 'ppc64':
            cmd.extend(["--buildarch", "ppc64"])
        elif self.tree_arch == 'ppc64le':
            cmd.extend(["--buildarch", "ppc64le"])

        # The only supported Mac hardware is x86, so make sure we only enable
        # Mac support on arches that need it.
        if self.tree_arch in ['x86_64'] and not self.is_nomacboot:
            cmd.append("--macboot")
        else:
            cmd.append("--nomacboot")

        try:
            cmd.extend(["--conf", self.config.get('lorax', 'conf_file')])
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            pass

        try:
            cmd.extend(["--installpkgs", self.config.get('lorax', 'installpkgs')])
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            pass

        if self.rootfs_size != "False":
            cmd.extend(["--rootfs-size", self.rootfs_size])

        # Allow the output directory to exist.
        cmd.append("--force")

        # MUST be last in the list
        cmd.append(self.topdir)
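
        # A fully assembled invocation looks roughly like (hypothetical values):
        #   lorax --workdir <wd> --logfile <log> --source file://<topdir>
        #         --product Fedora --version 21 --release "Fedora 21"
        #         --volid Fedora-21-x86_64 --nomacboot --force <topdir>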
        self.logger.info(" ".join(cmd))
        pungi.util._doRunCommand(cmd, self.logger)

        # write out the tree data for snake
        self.writeinfo('tree: %s' % self.mkrelative(self.topdir))

        # Write out checksums for verifytree
        # First open the treeinfo file so that we can config parse it
        treeinfofile = os.path.join(self.topdir, '.treeinfo')

        try:
            treefile = open(treeinfofile, 'r')
        except IOError:
            self.logger.error("Could not read .treeinfo file: %s" % treeinfofile)
            sys.exit(1)

        # Create a ConfigParser object out of the contents so that we can
        # write it back out later and not worry about formatting
        treeinfo = SortedConfigParser()
        treeinfo.readfp(treefile)
        treefile.close()
        treeinfo.add_section('checksums')

        # Create a function to use with os.path.walk to sum the files
        # basepath is used to make the sum output relative
        sums = []

        def getsum(basepath, dir, files):
            for file in files:
                path = os.path.join(dir, file)
                # don't bother summing directories. Won't work.
                if os.path.isdir(path):
                    continue
                sum = pungi.util._doCheckSum(path, 'sha256', self.logger)
                outpath = path.replace(basepath, '')
                sums.append((outpath, sum))

        # Walk the os/images path to get sums of all the files
        os.path.walk(os.path.join(self.topdir, 'images'), getsum, self.topdir + '/')

        # Capture PPC images
        if self.tree_arch in ['ppc', 'ppc64', 'ppc64le']:
            os.path.walk(os.path.join(self.topdir, 'ppc'), getsum, self.topdir + '/')

        # Get a checksum of repomd.xml since it has within it sums for other files
        repomd = os.path.join(self.topdir, 'repodata', 'repomd.xml')
        sum = pungi.util._doCheckSum(repomd, 'sha256', self.logger)
        sums.append((os.path.join('repodata', 'repomd.xml'), sum))

        # Now add the sums, and write the config out
        try:
            treefile = open(treeinfofile, 'w')
        except IOError:
            self.logger.error("Could not open .treeinfo for writing: %s" % treeinfofile)
            sys.exit(1)

        for path, sum in sums:
            treeinfo.set('checksums', path, sum)

        # Extract the names of the kernel images
        pr = re.compile('images-(.*)')
        images = []
        for img in treeinfo.sections():
            if pr.match(img):
                images.append(pr.match(img).group(1))

        # Extract information from the pre-productmd treeinfo's 'general' section
        name = treeinfo.get('general', 'family')
        version = treeinfo.get('general', 'version')
        arch = treeinfo.get('general', 'arch')
        platforms = ','.join(images)
        timestamp = int(float(treeinfo.get('general', 'timestamp')))

        # Set/modify 'general' section
        treeinfo.set('general', 'variant', name)
        treeinfo.set('general', 'timestamp', timestamp)
        treeinfo.set('general', 'packagedir', 'Packages')
        treeinfo.set('general', 'repository', '.')
        treeinfo.set('general', 'platforms', platforms)

        # Add 'header' section
        treeinfo.add_section('header')
        treeinfo.set('header', 'version', '1.0')

        # Add 'release' section
        treeinfo.add_section('release')
        treeinfo.set('release', 'name', name)
        treeinfo.set('release', 'short', name)
        treeinfo.set('release', 'version', version)

        # Add 'tree' section
        treeinfo.add_section('tree')
        treeinfo.set('tree', 'arch', arch)
        treeinfo.set('tree', 'build_timestamp', timestamp)
        treeinfo.set('tree', 'platforms', platforms)
        treeinfo.set('tree', 'variants', name)

        # Add 'variant-VARIANTNAME' section
        variant_section_name = 'variant-' + name
        treeinfo.add_section(variant_section_name)
        treeinfo.set(variant_section_name, 'id', name)
        treeinfo.set(variant_section_name, 'name', name)
        treeinfo.set(variant_section_name, 'packages', 'Packages')
        treeinfo.set(variant_section_name, 'repository', '.')
        treeinfo.set(variant_section_name, 'type', 'variant')
        treeinfo.set(variant_section_name, 'uid', name)

        treeinfo.write(treefile)
        treefile.close()
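    # The rewritten .treeinfo ends up with productmd-style sections, roughly
    # (hypothetical values):
    #   [header]            version = 1.0
    #   [release]           name = Fedora, version = 21
    #   [tree]              arch = x86_64, platforms = x86_64,xen
    #   [checksums]         images/boot.iso = sha256:<digest>
    #   [variant-Fedora]    id/name/uid = Fedora, packages = Packages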

    def doGetRelnotes(self):
        """Get extra files from packages in the tree to put in the topdir of
           the tree."""
        docsdir = os.path.join(self.workdir, 'docs')
        relnoterpms = self.config.get('pungi', 'relnotepkgs').split()

        fileres = []
        for pattern in self.config.get('pungi', 'relnotefilere').split():
            fileres.append(re.compile(pattern))

        dirres = []
        for pattern in self.config.get('pungi', 'relnotedirre').split():
            dirres.append(re.compile(pattern))

        pungi.util._ensuredir(docsdir, self.logger, force=self.config.getboolean('pungi', 'force'), clean=True)

        # Explode the packages we list as relnote packages
        pkgs = os.listdir(os.path.join(self.topdir, self.config.get('pungi', 'product_path')))

        rpm2cpio = ['/usr/bin/rpm2cpio']
        cpio = ['cpio', '-imud']

        for pkg in pkgs:
            pkgname = pkg.rsplit('-', 2)[0]
            for relnoterpm in relnoterpms:
                if pkgname == relnoterpm:
                    extraargs = [os.path.join(self.topdir, self.config.get('pungi', 'product_path'), pkg)]
                    try:
                        p1 = subprocess.Popen(rpm2cpio + extraargs, cwd=docsdir, stdout=subprocess.PIPE)
                        (out, err) = subprocess.Popen(cpio, cwd=docsdir, stdin=p1.stdout, stdout=subprocess.PIPE,
                                                      stderr=subprocess.PIPE, universal_newlines=True).communicate()
                    except Exception:
                        # err may not be bound if Popen itself failed, so don't log it here
                        self.logger.error("Got an error from rpm2cpio")
                        raise

                    if out:
                        self.logger.debug(out)

        # Walk the tree for our files
        for dirpath, dirname, filelist in os.walk(docsdir):
            for filename in filelist:
                for regex in fileres:
                    if regex.match(filename) and not os.path.exists(os.path.join(self.topdir, filename)):
                        self.logger.info("Linking release note file %s" % filename)
                        pungi.util._link(os.path.join(dirpath, filename),
                                         os.path.join(self.topdir, filename),
                                         self.logger,
                                         force=self.config.getboolean('pungi', 'force'))
                        self.common_files.append(filename)

        # Walk the tree for our dirs
        for dirpath, dirname, filelist in os.walk(docsdir):
            for directory in dirname:
                for regex in dirres:
                    if regex.match(directory) and not os.path.exists(os.path.join(self.topdir, directory)):
                        self.logger.info("Copying release note dir %s" % directory)
                        shutil.copytree(os.path.join(dirpath, directory), os.path.join(self.topdir, directory))

    def _doIsoChecksum(self, path, csumfile):
        """Simple function to wrap creating checksums of iso files."""

        try:
            checkfile = open(csumfile, 'a')
        except IOError:
            self.logger.error("Could not open checksum file: %s" % csumfile)
            sys.exit(1)

        self.logger.info("Generating checksum of %s" % path)
        checksum = pungi.util._doCheckSum(path, 'sha256', self.logger)
        if checksum:
            checkfile.write("SHA256 (%s) = %s\n" % (os.path.basename(path), checksum.replace('sha256:', '')))
        else:
            self.logger.error('Failed to generate checksum for %s' % path)
            sys.exit(1)
        checkfile.close()
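    # Each call appends one BSD-style checksum line, e.g. (hypothetical digest):
    #   SHA256 (Fedora-21-x86_64-DVD.iso) = 9d2fd1d5...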

    def doCreateIsos(self):
        """Create an ISO of the tree."""

        if self.tree_arch.startswith('arm'):
            self.logger.info("ARCH: arm, not doing doCreateIsos().")
            return

        ppcbootinfo = '/usr/share/lorax/config_files/ppc'

        pungi.util._ensuredir(self.isodir, self.logger,
                              force=self.config.getboolean('pungi', 'force'),
                              clean=True)  # This is risky...

        # setup the base command
        mkisofs = ['/usr/bin/xorriso', '-as', 'mkisofs']
        mkisofs.extend(['-v', '-U', '-J', '--joliet-long', '-R', '-T', '-m', 'repoview', '-m', 'boot.iso'])  # common mkisofs flags

        x86bootargs = ['-b', 'isolinux/isolinux.bin', '-c', 'isolinux/boot.cat',
                       '-no-emul-boot', '-boot-load-size', '4', '-boot-info-table']

        efibootargs = ['-eltorito-alt-boot', '-e', 'images/efiboot.img',
                       '-no-emul-boot']

        macbootargs = ['-eltorito-alt-boot', '-e', 'images/macboot.img',
                       '-no-emul-boot']

        ia64bootargs = ['-b', 'images/boot.img', '-no-emul-boot']

        ppcbootargs = ['-part', '-hfs', '-r', '-l', '-sysid', 'PPC', '-no-desktop', '-allow-multidot', '-chrp-boot']

        ppcbootargs.append('-map')
        ppcbootargs.append(os.path.join(ppcbootinfo, 'mapping'))

        ppcbootargs.append('-hfs-bless')  # must be last

        isohybrid = ['/usr/bin/isohybrid']
        isohybrid.extend(['--id', '42'])

        # Check the size of the tree
        # This size checking method may be bunk, accepting patches...
        if self.tree_arch != 'source':
            treesize = int(subprocess.Popen(mkisofs + ['-print-size', '-quiet', self.topdir], stdout=subprocess.PIPE).communicate()[0])
        else:
            srcdir = os.path.join(self.config.get('pungi', 'destdir'), self.config.get('pungi', 'version'),
                                  self.config.get('pungi', 'variant'), 'source', 'SRPMS')

            treesize = int(subprocess.Popen(mkisofs + ['-print-size', '-quiet', srcdir], stdout=subprocess.PIPE).communicate()[0])
        # The size is returned in 2 KiB blocks; convert it to MiB.
        treesize = treesize * 2048 / 1024 / 1024
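        # For example (hypothetical tree): a -print-size result of 409600
        # blocks is 409600 * 2048 / 1024 / 1024 = 800 MiB, which trips the
        # 700 MiB CD threshold below and selects the DVD naming.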

        if treesize > 700:  # we're larger than a 700 MiB CD
            isoname = '%s-DVD-%s-%s.iso' % (self.config.get('pungi', 'iso_basename'), self.tree_arch,
                                            self.config.get('pungi', 'version'))
        else:
            isoname = '%s-%s-%s.iso' % (self.config.get('pungi', 'iso_basename'), self.tree_arch,
                                        self.config.get('pungi', 'version'))

        isofile = os.path.join(self.isodir, isoname)

        # setup the extra mkisofs args
        extraargs = []

        if self.tree_arch == 'i386' or self.tree_arch == 'x86_64':
            extraargs.extend(x86bootargs)
            if self.tree_arch == 'x86_64':
                extraargs.extend(efibootargs)
                isohybrid.append('-u')
                if (not self.is_nomacboot) and os.path.exists(os.path.join(self.topdir, 'images', 'macboot.img')):
                    extraargs.extend(macbootargs)
                    isohybrid.append('-m')
        elif self.tree_arch == 'ia64':
            extraargs.extend(ia64bootargs)
        elif self.tree_arch.startswith('ppc'):
            extraargs.extend(ppcbootargs)
            extraargs.append(os.path.join(self.topdir, "ppc/mac"))
        elif self.tree_arch.startswith('aarch64'):
            extraargs.extend(efibootargs)

        # NOTE: if this doesn't match what's in the bootloader config, the
        # image won't be bootable!
        extraargs.append('-V')
        extraargs.append(self._shortenVolID())

        extraargs.extend(['-o', isofile])

        isohybrid.append(isofile)

        if self.tree_arch != 'source':
            extraargs.append(self.topdir)
        else:
            extraargs.append(os.path.join(self.archdir, 'SRPMS'))

        if self.config.get('pungi', 'no_dvd') == "False":
            # run the command
            pungi.util._doRunCommand(mkisofs + extraargs, self.logger)

            # Run isohybrid on the iso as long as it's not the source iso
            if os.path.exists("/usr/bin/isohybrid") and self.tree_arch != 'source':
                pungi.util._doRunCommand(isohybrid, self.logger)

            # implant md5 for mediacheck on all but source arches
            if self.tree_arch != 'source':
                pungi.util._doRunCommand(['/usr/bin/implantisomd5', isofile], self.logger)

        # shove the checksum into a file
        csumfile = os.path.join(self.isodir, '%s-%s-%s-CHECKSUM' % (
            self.config.get('pungi', 'iso_basename'),
            self.config.get('pungi', 'version'),
            self.tree_arch))
        # Write a line about what checksums are used.
        # sha256sum is magic...
        f = open(csumfile, 'w')
        f.write('# The image checksum(s) are generated with sha256sum.\n')
        f.close()
        if self.config.get('pungi', 'no_dvd') == "False":
            self._doIsoChecksum(isofile, csumfile)

        # Write out a line describing the media
        self.writeinfo('media: %s' % self.mkrelative(isofile))

        # Now link the boot iso
        if self.tree_arch != 'source' and \
                os.path.exists(os.path.join(self.topdir, 'images', 'boot.iso')):
            isoname = '%s-netinst-%s-%s.iso' % (self.config.get('pungi', 'iso_basename'),
                                                self.tree_arch, self.config.get('pungi', 'version'))
            isofile = os.path.join(self.isodir, isoname)

            # link the boot iso to the iso dir
            pungi.util._link(os.path.join(self.topdir, 'images', 'boot.iso'), isofile, self.logger)

            # shove the checksum into a file
            self._doIsoChecksum(isofile, csumfile)

        self.logger.info("CreateIsos is done.")