Format code base with black

https://black.readthedocs.io/en/stable/

JIRA: COMPOSE-4086
Signed-off-by: Haibo Lin <hlin@redhat.com>
parent 38142d30ba
commit 41a629969c
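The hunks below are mechanical reformatting only: quote normalization, wrapping at black's default line length, and trailing commas in exploded calls; there are no behaviour changes. As a rough illustration of what the tool does (assumed usage of black's Python API, not part of this commit), the same rewrite can be reproduced on a single source string:

# Minimal sketch, assuming black is installed; SRC is a made-up example
# modelled on the first hunk below, not code taken from this commit.
import black

SRC = "location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')\n"

# format_str() applies the same rules the CLI run applied to the repository:
# double quotes, 88-column wrapping, normalized call layout.
print(black.format_str(SRC, mode=black.FileMode()))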
@@ -9,15 +9,20 @@ def get_full_version():
     Find full version of Pungi: if running from git, this will return cleaned
     output of `git describe`, otherwise it will look for installed version.
     """
-    location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
-    if os.path.isdir(os.path.join(location, '.git')):
+    location = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
+    if os.path.isdir(os.path.join(location, ".git")):
         import subprocess
-        proc = subprocess.Popen(['git', '--git-dir=%s/.git' % location, 'describe', '--tags'],
-                                stdout=subprocess.PIPE, universal_newlines=True)
+
+        proc = subprocess.Popen(
+            ["git", "--git-dir=%s/.git" % location, "describe", "--tags"],
+            stdout=subprocess.PIPE,
+            universal_newlines=True,
+        )
         output, _ = proc.communicate()
-        return re.sub(r'-1.fc\d\d?', '', output.strip().replace('pungi-', ''))
+        return re.sub(r"-1.fc\d\d?", "", output.strip().replace("pungi-", ""))
     else:
         import subprocess
+
         proc = subprocess.Popen(
             ["rpm", "-q", "pungi"], stdout=subprocess.PIPE, universal_newlines=True
         )
@@ -25,4 +30,4 @@ def get_full_version():
         if not err:
             return output.rstrip()
         else:
-            return 'unknown'
+            return "unknown"
@@ -93,14 +93,18 @@ def split_name_arch(name_arch):

 def is_excluded(package, arches, logger=None):
     """Check if package is excluded from given architectures."""
-    if (package.excludearch and set(package.excludearch) & set(arches)):
+    if package.excludearch and set(package.excludearch) & set(arches):
         if logger:
-            logger.debug("Excluding (EXCLUDEARCH: %s): %s"
-                         % (sorted(set(package.excludearch)), package.file_name))
+            logger.debug(
+                "Excluding (EXCLUDEARCH: %s): %s"
+                % (sorted(set(package.excludearch)), package.file_name)
+            )
         return True
-    if (package.exclusivearch and not (set(package.exclusivearch) & set(arches))):
+    if package.exclusivearch and not (set(package.exclusivearch) & set(arches)):
         if logger:
-            logger.debug("Excluding (EXCLUSIVEARCH: %s): %s"
-                         % (sorted(set(package.exclusivearch)), package.file_name))
+            logger.debug(
+                "Excluding (EXCLUSIVEARCH: %s): %s"
+                % (sorted(set(package.exclusivearch)), package.file_name)
+            )
         return True
     return False
@@ -12,12 +12,13 @@ import struct
 _ppc64_native_is_best = True

 # dict mapping arch -> ( multicompat, best personality, biarch personality )
-multilibArches = {"x86_64": ("athlon", "x86_64", "athlon"),
+multilibArches = {
+    "x86_64": ("athlon", "x86_64", "athlon"),
     "sparc64v": ("sparcv9v", "sparcv9v", "sparc64v"),
     "sparc64": ("sparcv9", "sparcv9", "sparc64"),
     "ppc64": ("ppc", "ppc", "ppc64"),
     "s390x": ("s390", "s390x", "s390"),
 }
 if _ppc64_native_is_best:
     multilibArches["ppc64"] = ("ppc", "ppc64", "ppc64")

@@ -29,26 +30,21 @@ arches = {
     "i586": "i486",
     "i486": "i386",
     "i386": "noarch",
-
     # amd64
     "x86_64": "athlon",
     "amd64": "x86_64",
     "ia32e": "x86_64",
-
     # ppc64le
     "ppc64le": "noarch",
-
     # ppc
     "ppc64p7": "ppc64",
     "ppc64pseries": "ppc64",
     "ppc64iseries": "ppc64",
     "ppc64": "ppc",
     "ppc": "noarch",
-
     # s390{,x}
     "s390x": "s390",
     "s390": "noarch",
-
     # sparc
     "sparc64v": "sparcv9v",
     "sparc64": "sparcv9",
@@ -56,7 +52,6 @@ arches = {
     "sparcv9": "sparcv8",
     "sparcv8": "sparc",
     "sparc": "noarch",
-
     # alpha
     "alphaev7": "alphaev68",
     "alphaev68": "alphaev67",
@@ -68,29 +63,23 @@ arches = {
     "alphaev45": "alphaev4",
     "alphaev4": "alpha",
     "alpha": "noarch",
-
     # arm
     "armv7l": "armv6l",
     "armv6l": "armv5tejl",
     "armv5tejl": "armv5tel",
     "armv5tel": "noarch",
-
     # arm hardware floating point
     "armv7hnl": "armv7hl",
     "armv7hl": "armv6hl",
     "armv6hl": "noarch",
-
     # arm64
     "arm64": "noarch",
-
     # aarch64
     "aarch64": "noarch",
-
     # super-h
     "sh4a": "sh4",
     "sh4": "noarch",
     "sh3": "noarch",
-
     # itanium
     "ia64": "noarch",
 }
@@ -137,7 +126,7 @@ def getArchList(thisarch=None): # pragma: no cover

     # if we're a weirdo arch - add noarch on there.
     if len(archlist) == 1 and archlist[0] == thisarch:
-        archlist.append('noarch')
+        archlist.append("noarch")
     return archlist


@@ -208,10 +197,10 @@ def getCanonX86Arch(arch): # pragma: no cover

 def getCanonARMArch(arch): # pragma: no cover
     # the %{_target_arch} macro in rpm will let us know the abi we are using
-    target = rpm.expandMacro('%{_target_cpu}')
-    if target.startswith('armv6h'):
+    target = rpm.expandMacro("%{_target_cpu}")
+    if target.startswith("armv6h"):
         return target
-    if target.startswith('armv7h'):
+    if target.startswith("armv7h"):
         return target
     return arch

@@ -224,7 +213,7 @@ def getCanonPPCArch(arch): # pragma: no cover
     machine = None
     for line in _try_read_cpuinfo():
         if line.find("machine") != -1:
-            machine = line.split(':')[1]
+            machine = line.split(":")[1]
             break

     platform = _aux_vector["platform"]
@@ -232,7 +221,7 @@ def getCanonPPCArch(arch): # pragma: no cover
         return arch

     try:
-        if platform.startswith("power") and int(platform[5:].rstrip('+')) >= 7:
+        if platform.startswith("power") and int(platform[5:].rstrip("+")) >= 7:
             return "ppc64p7"
     except:
         pass
@@ -252,7 +241,7 @@ def getCanonSPARCArch(arch): # pragma: no cover
     SPARCtype = None
     for line in _try_read_cpuinfo():
         if line.startswith("type"):
-            SPARCtype = line.split(':')[1]
+            SPARCtype = line.split(":")[1]
             break
     if SPARCtype is None:
         return arch
@@ -279,7 +268,7 @@ def getCanonX86_64Arch(arch): # pragma: no cover
     vendor = None
     for line in _try_read_cpuinfo():
         if line.startswith("vendor_id"):
-            vendor = line.split(':')[1]
+            vendor = line.split(":")[1]
             break
     if vendor is None:
         return arch
@@ -308,7 +297,7 @@ def getCanonArch(skipRpmPlatform=0): # pragma: no cover

     _parse_auxv()

-    if (len(arch) == 4 and arch[0] == "i" and arch[2:4] == "86"):
+    if len(arch) == 4 and arch[0] == "i" and arch[2:4] == "86":
         return getCanonX86Arch(arch)

     if arch.startswith("arm"):
@@ -370,7 +359,7 @@ def getBaseArch(myarch=None): # pragma: no cover
     if myarch in arches:
         basearch = myarch
         value = arches[basearch]
-        while value != 'noarch':
+        while value != "noarch":
             basearch = value
             value = arches[basearch]

pungi/checks.py (716 changed lines): file diff suppressed because it is too large.
@@ -15,7 +15,6 @@


 class OptionsBase(object):
-
     def __init__(self, **kwargs):
         """
         inherit and initialize attributes
@@ -29,5 +28,7 @@ class OptionsBase(object):
         """
         for key, value in kwargs.items():
             if not hasattr(self, key):
-                raise ValueError("Invalid option in %s: %s" % (self.__class__.__name__, key))
+                raise ValueError(
+                    "Invalid option in %s: %s" % (self.__class__.__name__, key)
+                )
             setattr(self, key, value)
pungi/compose.py (175 changed lines)
@@ -14,9 +14,7 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


-__all__ = (
-    "Compose",
-)
+__all__ = ("Compose",)


 import errno
@@ -38,7 +36,10 @@ from pungi.wrappers.variants import VariantsXmlParser
 from pungi.paths import Paths
 from pungi.wrappers.scm import get_file_from_scm
 from pungi.util import (
-    makedirs, get_arch_variant_data, get_format_substs, get_variant_data
+    makedirs,
+    get_arch_variant_data,
+    get_format_substs,
+    get_variant_data,
 )
 from pungi.metadata import compose_to_composeinfo

@@ -50,7 +51,15 @@ except ImportError:
 SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]


-def get_compose_dir(topdir, conf, compose_type="production", compose_date=None, compose_respin=None, compose_label=None, already_exists_callbacks=None):
+def get_compose_dir(
+    topdir,
+    conf,
+    compose_type="production",
+    compose_date=None,
+    compose_respin=None,
+    compose_label=None,
+    already_exists_callbacks=None,
+):
     already_exists_callbacks = already_exists_callbacks or []

     # create an incomplete composeinfo to generate compose ID
@@ -107,7 +116,18 @@ def get_compose_dir(topdir, conf, compose_type="production", compose_date=None,


 class Compose(kobo.log.LoggingBase):
-    def __init__(self, conf, topdir, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None, notifier=None):
+    def __init__(
+        self,
+        conf,
+        topdir,
+        skip_phases=None,
+        just_phases=None,
+        old_composes=None,
+        koji_event=None,
+        supported=False,
+        logger=None,
+        notifier=None,
+    ):
         kobo.log.LoggingBase.__init__(self, logger)
         # TODO: check if minimal conf values are set
         self.conf = conf
@@ -128,18 +148,27 @@ class Compose(kobo.log.LoggingBase):

         # Set up logging to file
         if logger:
-            kobo.log.add_file_logger(logger, self.paths.log.log_file("global", "pungi.log"))
-            kobo.log.add_file_logger(logger, self.paths.log.log_file("global", "excluding-arch.log"))
+            kobo.log.add_file_logger(
+                logger, self.paths.log.log_file("global", "pungi.log")
+            )
+            kobo.log.add_file_logger(
+                logger, self.paths.log.log_file("global", "excluding-arch.log")
+            )

             class PungiLogFilter(logging.Filter):
                 def filter(self, record):
-                    return False if record.funcName and record.funcName == 'is_excluded' else True
+                    return (
+                        False
+                        if record.funcName and record.funcName == "is_excluded"
+                        else True
+                    )

             class ExcludingArchLogFilter(logging.Filter):
                 def filter(self, record):
                     message = record.getMessage()
-                    if 'Populating package set for arch:' in message or \
-                            (record.funcName and record.funcName == 'is_excluded'):
+                    if "Populating package set for arch:" in message or (
+                        record.funcName and record.funcName == "is_excluded"
+                    ):
                         return True
                     else:
                         return False
@@ -147,18 +176,26 @@ class Compose(kobo.log.LoggingBase):
             for handler in logger.handlers:
                 if isinstance(handler, logging.FileHandler):
                     log_file_name = os.path.basename(handler.stream.name)
-                    if log_file_name == 'pungi.global.log':
+                    if log_file_name == "pungi.global.log":
                         handler.addFilter(PungiLogFilter())
-                    elif log_file_name == 'excluding-arch.global.log':
+                    elif log_file_name == "excluding-arch.global.log":
                         handler.addFilter(ExcludingArchLogFilter())

         # to provide compose_id, compose_date and compose_respin
         self.ci_base = ComposeInfo()
-        self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))
+        self.ci_base.load(
+            os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json")
+        )

         self.supported = supported
-        if self.compose_label and self.compose_label.split("-")[0] in SUPPORTED_MILESTONES:
-            self.log_info("Automatically setting 'supported' flag due to label: %s." % self.compose_label)
+        if (
+            self.compose_label
+            and self.compose_label.split("-")[0] in SUPPORTED_MILESTONES
+        ):
+            self.log_info(
+                "Automatically setting 'supported' flag due to label: %s."
+                % self.compose_label
+            )
             self.supported = True

         self.im = Images()
@@ -179,10 +216,10 @@ class Compose(kobo.log.LoggingBase):
             self.cache_region = make_region().configure(
                 self.conf.get("dogpile_cache_backend"),
                 expiration_time=self.conf.get("dogpile_cache_expiration_time", 3600),
-                arguments=self.conf.get("dogpile_cache_arguments", {})
+                arguments=self.conf.get("dogpile_cache_arguments", {}),
             )
         else:
-            self.cache_region = make_region().configure('dogpile.cache.null')
+            self.cache_region = make_region().configure("dogpile.cache.null")

     get_compose_dir = staticmethod(get_compose_dir)

@@ -234,10 +271,10 @@ class Compose(kobo.log.LoggingBase):
         """Explicit configuration trumps all. Otherwise check gather backend
         and only create it for Yum.
         """
-        config = self.conf.get('createrepo_database')
+        config = self.conf.get("createrepo_database")
         if config is not None:
             return config
-        return self.conf['gather_backend'] == 'yum'
+        return self.conf["gather_backend"] == "yum"

     def read_variants(self):
         # TODO: move to phases/init ?
@@ -263,7 +300,9 @@ class Compose(kobo.log.LoggingBase):
         tree_arches = self.conf.get("tree_arches", None)
         tree_variants = self.conf.get("tree_variants", None)
         with open(variants_file, "r") as file_obj:
-            parser = VariantsXmlParser(file_obj, tree_arches, tree_variants, logger=self._logger)
+            parser = VariantsXmlParser(
+                file_obj, tree_arches, tree_variants, logger=self._logger
+            )
             self.variants = parser.parse()

         self.all_variants = {}
@@ -294,21 +333,28 @@ class Compose(kobo.log.LoggingBase):
     @property
     def status_file(self):
         """Path to file where the compose status will be stored."""
-        if not hasattr(self, '_status_file'):
-            self._status_file = os.path.join(self.topdir, 'STATUS')
+        if not hasattr(self, "_status_file"):
+            self._status_file = os.path.join(self.topdir, "STATUS")
         return self._status_file

     def _log_failed_deliverables(self):
         for kind, data in self.failed_deliverables.items():
             for variant, arch, subvariant in data:
-                self.log_info('Failed %s on variant <%s>, arch <%s>, subvariant <%s>.'
-                              % (kind, variant, arch, subvariant))
-        log = os.path.join(self.paths.log.topdir('global'), 'deliverables.json')
-        with open(log, 'w') as f:
-            json.dump({'required': self.required_deliverables,
-                       'failed': self.failed_deliverables,
-                       'attempted': self.attempted_deliverables},
-                      f, indent=4)
+                self.log_info(
+                    "Failed %s on variant <%s>, arch <%s>, subvariant <%s>."
+                    % (kind, variant, arch, subvariant)
+                )
+        log = os.path.join(self.paths.log.topdir("global"), "deliverables.json")
+        with open(log, "w") as f:
+            json.dump(
+                {
+                    "required": self.required_deliverables,
+                    "failed": self.failed_deliverables,
+                    "attempted": self.attempted_deliverables,
+                },
+                f,
+                indent=4,
+            )

     def write_status(self, stat_msg):
         if stat_msg not in ("STARTED", "FINISHED", "DOOMED", "TERMINATED"):
@@ -321,8 +367,8 @@ class Compose(kobo.log.LoggingBase):
             self.log_error(msg)
             raise RuntimeError(msg)

-        if stat_msg == 'FINISHED' and self.failed_deliverables:
-            stat_msg = 'FINISHED_INCOMPLETE'
+        if stat_msg == "FINISHED" and self.failed_deliverables:
+            stat_msg = "FINISHED_INCOMPLETE"

         self._log_failed_deliverables()

@@ -330,21 +376,22 @@ class Compose(kobo.log.LoggingBase):
             f.write(stat_msg + "\n")

         if self.notifier:
-            self.notifier.send('status-change', status=stat_msg)
+            self.notifier.send("status-change", status=stat_msg)

     def get_status(self):
         if not os.path.isfile(self.status_file):
             return
         return open(self.status_file, "r").read().strip()

-    def get_image_name(self, arch, variant, disc_type='dvd',
-                       disc_num=1, suffix='.iso', format=None):
+    def get_image_name(
+        self, arch, variant, disc_type="dvd", disc_num=1, suffix=".iso", format=None
+    ):
         """Create a filename for image with given parameters.

         :raises RuntimeError: when unknown ``disc_type`` is given
         """
         default_format = "{compose_id}-{variant}-{arch}-{disc_type}{disc_num}{suffix}"
-        format = format or self.conf.get('image_name_format', default_format)
+        format = format or self.conf.get("image_name_format", default_format)

         if isinstance(format, dict):
             conf = get_variant_data(self.conf, "image_name_format", variant)
@@ -359,47 +406,54 @@ class Compose(kobo.log.LoggingBase):
             disc_num = ""

         kwargs = {
-            'arch': arch,
-            'disc_type': disc_type,
-            'disc_num': disc_num,
-            'suffix': suffix
+            "arch": arch,
+            "disc_type": disc_type,
+            "disc_num": disc_num,
+            "suffix": suffix,
         }
         if variant.type == "layered-product":
             variant_uid = variant.parent.uid
-            kwargs['compose_id'] = self.ci_base[variant.uid].compose_id
+            kwargs["compose_id"] = self.ci_base[variant.uid].compose_id
         else:
             variant_uid = variant.uid
         args = get_format_substs(self, variant=variant_uid, **kwargs)
         try:
             return (format % args).format(**args)
         except KeyError as err:
-            raise RuntimeError('Failed to create image name: unknown format element: %s' % err)
+            raise RuntimeError(
+                "Failed to create image name: unknown format element: %s" % err
+            )

     def can_fail(self, variant, arch, deliverable):
         """Figure out if deliverable can fail on variant.arch.

         Variant can be None.
         """
-        failable = get_arch_variant_data(self.conf, 'failable_deliverables', arch, variant)
+        failable = get_arch_variant_data(
+            self.conf, "failable_deliverables", arch, variant
+        )
         return deliverable in failable

     def attempt_deliverable(self, variant, arch, kind, subvariant=None):
         """Log information about attempted deliverable."""
-        variant_uid = variant.uid if variant else ''
+        variant_uid = variant.uid if variant else ""
         self.attempted_deliverables.setdefault(kind, []).append(
-            (variant_uid, arch, subvariant))
+            (variant_uid, arch, subvariant)
+        )

     def require_deliverable(self, variant, arch, kind, subvariant=None):
         """Log information about attempted deliverable."""
-        variant_uid = variant.uid if variant else ''
+        variant_uid = variant.uid if variant else ""
         self.required_deliverables.setdefault(kind, []).append(
-            (variant_uid, arch, subvariant))
+            (variant_uid, arch, subvariant)
+        )

     def fail_deliverable(self, variant, arch, kind, subvariant=None):
         """Log information about failed deliverable."""
-        variant_uid = variant.uid if variant else ''
+        variant_uid = variant.uid if variant else ""
         self.failed_deliverables.setdefault(kind, []).append(
-            (variant_uid, arch, subvariant))
+            (variant_uid, arch, subvariant)
+        )

     @property
     def image_release(self):
@@ -409,11 +463,14 @@ class Compose(kobo.log.LoggingBase):
         otherwise we will create a string with date, compose type and respin.
         """
         if self.compose_label:
-            milestone, release = self.compose_label.split('-')
+            milestone, release = self.compose_label.split("-")
             return release

-        return '%s%s.%s' % (self.compose_date, self.ci_base.compose.type_suffix,
-                            self.compose_respin)
+        return "%s%s.%s" % (
+            self.compose_date,
+            self.ci_base.compose.type_suffix,
+            self.compose_respin,
+        )

     @property
     def image_version(self):
@@ -423,9 +480,9 @@ class Compose(kobo.log.LoggingBase):
         milestone from it is appended to the version (unless it is RC).
         """
         version = self.ci_base.release.version
-        if self.compose_label and not self.compose_label.startswith('RC-'):
-            milestone, release = self.compose_label.split('-')
-            return '%s_%s' % (version, milestone)
+        if self.compose_label and not self.compose_label.startswith("RC-"):
+            milestone, release = self.compose_label.split("-")
+            return "%s_%s" % (version, milestone)

         return version

@@ -451,7 +508,7 @@ def get_ordered_variant_uids(compose):
         setattr(
             compose,
             "_ordered_variant_uids",
-            unordered_variant_uids + ordered_variant_uids
+            unordered_variant_uids + ordered_variant_uids,
         )
     return getattr(compose, "_ordered_variant_uids")

@@ -469,7 +526,9 @@ def _prepare_variant_as_lookaside(compose):
         try:
             graph.add_edge(variant, lookaside_variant)
         except ValueError as e:
-            raise ValueError("There is a bad configuration in 'variant_as_lookaside': %s" % e)
+            raise ValueError(
+                "There is a bad configuration in 'variant_as_lookaside': %s" % e
+            )

     variant_processing_order = reversed(graph.prune_graph())
     return list(variant_processing_order)
@@ -42,9 +42,12 @@ def write_discinfo(file_path, description, arch, disc_numbers=None, timestamp=No
     """
     disc_numbers = disc_numbers or ["ALL"]
     if not isinstance(disc_numbers, list):
-        raise TypeError("Invalid type: disc_numbers type is %s; expected: <list>" % type(disc_numbers))
+        raise TypeError(
+            "Invalid type: disc_numbers type is %s; expected: <list>"
+            % type(disc_numbers)
+        )
     if not timestamp:
-        timestamp = os.environ.get('SOURCE_DATE_EPOCH', "%f" % time.time())
+        timestamp = os.environ.get("SOURCE_DATE_EPOCH", "%f" % time.time())
     with open(file_path, "w") as f:
         f.write("%s\n" % timestamp)
         f.write("%s\n" % description)
@@ -21,51 +21,58 @@ import time
 from ConfigParser import SafeConfigParser

 from .arch_utils import getBaseArch

 # In development, `here` will point to the bin/ directory with scripts.
 here = sys.path[0]
-MULTILIBCONF = (os.path.join(os.path.dirname(__file__), '..', 'share', 'multilib')
-                if here != '/usr/bin'
-                else '/usr/share/pungi/multilib')
+MULTILIBCONF = (
+    os.path.join(os.path.dirname(__file__), "..", "share", "multilib")
+    if here != "/usr/bin"
+    else "/usr/share/pungi/multilib"
+)

+
 class Config(SafeConfigParser):
     def __init__(self, pungirc=None):
         SafeConfigParser.__init__(self)

-        self.add_section('pungi')
-        self.add_section('lorax')
+        self.add_section("pungi")
+        self.add_section("lorax")

-        self.set('pungi', 'osdir', 'os')
-        self.set('pungi', 'sourcedir', 'source')
-        self.set('pungi', 'debugdir', 'debug')
-        self.set('pungi', 'isodir', 'iso')
-        self.set('pungi', 'multilibconf', MULTILIBCONF)
-        self.set('pungi', 'relnotefilere', 'LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG')
-        self.set('pungi', 'relnotedirre', '')
-        self.set('pungi', 'relnotepkgs', 'fedora-repos fedora-release fedora-release-notes')
-        self.set('pungi', 'product_path', 'Packages')
-        self.set('pungi', 'cachedir', '/var/cache/pungi')
-        self.set('pungi', 'compress_type', 'xz')
-        self.set('pungi', 'arch', getBaseArch())
-        self.set('pungi', 'family', 'Fedora')
-        self.set('pungi', 'iso_basename', 'Fedora')
-        self.set('pungi', 'version', time.strftime('%Y%m%d', time.localtime()))
-        self.set('pungi', 'variant', '')
-        self.set('pungi', 'destdir', os.getcwd())
-        self.set('pungi', 'workdirbase', "/work")
-        self.set('pungi', 'bugurl', 'https://bugzilla.redhat.com')
-        self.set('pungi', 'cdsize', '695.0')
-        self.set('pungi', 'debuginfo', "True")
-        self.set('pungi', 'alldeps', "True")
-        self.set('pungi', 'isfinal', "False")
-        self.set('pungi', 'nohash', "False")
-        self.set('pungi', 'full_archlist', "False")
-        self.set('pungi', 'multilib', '')
-        self.set('pungi', 'lookaside_repos', '')
-        self.set('pungi', 'resolve_deps', "True")
-        self.set('pungi', 'no_dvd', "False")
-        self.set('pungi', 'nomacboot', "False")
-        self.set('pungi', 'rootfs_size', "False")
+        self.set("pungi", "osdir", "os")
+        self.set("pungi", "sourcedir", "source")
+        self.set("pungi", "debugdir", "debug")
+        self.set("pungi", "isodir", "iso")
+        self.set("pungi", "multilibconf", MULTILIBCONF)
+        self.set(
+            "pungi", "relnotefilere", "LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG"
+        )
+        self.set("pungi", "relnotedirre", "")
+        self.set(
+            "pungi", "relnotepkgs", "fedora-repos fedora-release fedora-release-notes"
+        )
+        self.set("pungi", "product_path", "Packages")
+        self.set("pungi", "cachedir", "/var/cache/pungi")
+        self.set("pungi", "compress_type", "xz")
+        self.set("pungi", "arch", getBaseArch())
+        self.set("pungi", "family", "Fedora")
+        self.set("pungi", "iso_basename", "Fedora")
+        self.set("pungi", "version", time.strftime("%Y%m%d", time.localtime()))
+        self.set("pungi", "variant", "")
+        self.set("pungi", "destdir", os.getcwd())
+        self.set("pungi", "workdirbase", "/work")
+        self.set("pungi", "bugurl", "https://bugzilla.redhat.com")
+        self.set("pungi", "cdsize", "695.0")
+        self.set("pungi", "debuginfo", "True")
+        self.set("pungi", "alldeps", "True")
+        self.set("pungi", "isfinal", "False")
+        self.set("pungi", "nohash", "False")
+        self.set("pungi", "full_archlist", "False")
+        self.set("pungi", "multilib", "")
+        self.set("pungi", "lookaside_repos", "")
+        self.set("pungi", "resolve_deps", "True")
+        self.set("pungi", "no_dvd", "False")
+        self.set("pungi", "nomacboot", "False")
+        self.set("pungi", "rootfs_size", "False")

         # if missing, self.read() is a noop, else change 'defaults'
         if pungirc:
@@ -11,10 +11,21 @@ from .wrappers import iso
 from .wrappers.jigdo import JigdoWrapper


-CreateIsoOpts = namedtuple('CreateIsoOpts',
-                           ['buildinstall_method', 'arch', 'output_dir', 'jigdo_dir',
-                            'iso_name', 'volid', 'graft_points', 'supported', 'os_tree',
-                            "hfs_compat"])
+CreateIsoOpts = namedtuple(
+    "CreateIsoOpts",
+    [
+        "buildinstall_method",
+        "arch",
+        "output_dir",
+        "jigdo_dir",
+        "iso_name",
+        "volid",
+        "graft_points",
+        "supported",
+        "os_tree",
+        "hfs_compat",
+    ],
+)
 CreateIsoOpts.__new__.__defaults__ = (None,) * len(CreateIsoOpts._fields)


@@ -22,8 +33,8 @@ def quote(str):
     """Quote an argument for shell, but make sure $TEMPLATE variable will be
     expanded.
     """
-    if str.startswith('$TEMPLATE'):
-        return '$TEMPLATE%s' % shlex_quote(str.replace('$TEMPLATE', '', 1))
+    if str.startswith("$TEMPLATE"):
+        return "$TEMPLATE%s" % shlex_quote(str.replace("$TEMPLATE", "", 1))
     return shlex_quote(str)


@@ -32,38 +43,46 @@ def emit(f, cmd):
     if isinstance(cmd, six.string_types):
         print(cmd, file=f)
     else:
-        print(' '.join([quote(x) for x in cmd]), file=f)
+        print(" ".join([quote(x) for x in cmd]), file=f)


 FIND_TEMPLATE_SNIPPET = """
 if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then
     TEMPLATE=/usr/share/lorax;
 fi
-""".replace('\n', '')
+""".replace(
+    "\n", ""
+)


 def make_image(f, opts):
     mkisofs_kwargs = {}

     if opts.buildinstall_method:
-        if opts.buildinstall_method == 'lorax':
+        if opts.buildinstall_method == "lorax":
             emit(f, FIND_TEMPLATE_SNIPPET)
             mkisofs_kwargs["boot_args"] = iso.get_boot_options(
                 opts.arch,
                 os.path.join("$TEMPLATE", "config_files/ppc"),
                 hfs_compat=opts.hfs_compat,
             )
-        elif opts.buildinstall_method == 'buildinstall':
+        elif opts.buildinstall_method == "buildinstall":
             mkisofs_kwargs["boot_args"] = iso.get_boot_options(
-                opts.arch, "/usr/lib/anaconda-runtime/boot")
+                opts.arch, "/usr/lib/anaconda-runtime/boot"
+            )

     # ppc(64) doesn't seem to support utf-8
     if opts.arch in ("ppc", "ppc64", "ppc64le"):
         mkisofs_kwargs["input_charset"] = None

-    cmd = iso.get_mkisofs_cmd(opts.iso_name, None, volid=opts.volid,
-                              exclude=["./lost+found"],
-                              graft_points=opts.graft_points, **mkisofs_kwargs)
+    cmd = iso.get_mkisofs_cmd(
+        opts.iso_name,
+        None,
+        volid=opts.volid,
+        exclude=["./lost+found"],
+        graft_points=opts.graft_points,
+        **mkisofs_kwargs
+    )
     emit(f, cmd)


@@ -88,22 +107,20 @@ def make_manifest(f, opts):

 def make_jigdo(f, opts):
     jigdo = JigdoWrapper()
-    files = [
-        {
-            "path": opts.os_tree,
-            "label": None,
-            "uri": None,
-        }
-    ]
-    cmd = jigdo.get_jigdo_cmd(os.path.join(opts.output_dir, opts.iso_name),
-                              files, output_dir=opts.jigdo_dir,
-                              no_servers=True, report="noprogress")
+    files = [{"path": opts.os_tree, "label": None, "uri": None}]
+    cmd = jigdo.get_jigdo_cmd(
+        os.path.join(opts.output_dir, opts.iso_name),
+        files,
+        output_dir=opts.jigdo_dir,
+        no_servers=True,
+        report="noprogress",
+    )
     emit(f, cmd)


 def write_script(opts, f):
     if bool(opts.jigdo_dir) != bool(opts.os_tree):
-        raise RuntimeError('jigdo_dir must be used together with os_tree')
+        raise RuntimeError("jigdo_dir must be used together with os_tree")

     emit(f, "#!/bin/bash")
     emit(f, "set -ex")
@@ -42,8 +42,8 @@ class Substitutions(dict):
     # DNF version of Substitutions detects host arch. We don't want that.
     def __init__(self, arch):
         super(Substitutions, self).__init__()
-        self['arch'] = arch
-        self['basearch'] = dnf_arch.basearch(arch)
+        self["arch"] = arch
+        self["basearch"] = dnf_arch.basearch(arch)


 class DnfWrapper(dnf.Base):
@@ -52,8 +52,9 @@ class DnfWrapper(dnf.Base):
         self.arch_wrapper = ArchWrapper(self.conf.substitutions["arch"])
         self.comps_wrapper = CompsWrapper(self)

-    def add_repo(self, repoid, baseurl=None, enablegroups=True, lookaside=False,
-                 **kwargs):
+    def add_repo(
+        self, repoid, baseurl=None, enablegroups=True, lookaside=False, **kwargs
+    ):
         self.repos.add_new_repo(
             repoid,
             self.conf,
@@ -83,7 +84,13 @@ class CompsWrapper(object):
             result[i.id] = i
         return result

-    def get_packages_from_group(self, group_id, include_default=True, include_optional=True, include_conditional=True):
+    def get_packages_from_group(
+        self,
+        group_id,
+        include_default=True,
+        include_optional=True,
+        include_conditional=True,
+    ):
         packages = []
         conditional = []

@@ -117,9 +124,11 @@ class CompsWrapper(object):
                 continue

             include_default = group_include in (1, 2)
-            include_optional = group_include in (2, )
+            include_optional = group_include in (2,)
             include_conditional = True
-            pkgs, cond = self.get_packages_from_group(group_id, include_default, include_optional, include_conditional)
+            pkgs, cond = self.get_packages_from_group(
+                group_id, include_default, include_optional, include_conditional
+            )
             packages.update(pkgs)
             for i in cond:
                 if i not in conditional:
@@ -136,7 +145,11 @@ class CompsWrapper(object):
 class ArchWrapper(object):
     def __init__(self, arch):
         self.base_arch = dnf_arch.basearch(arch)
-        self.all_arches = pungi.arch.get_valid_arches(self.base_arch, multilib=True, add_noarch=True)
-        self.native_arches = pungi.arch.get_valid_arches(self.base_arch, multilib=False, add_noarch=True)
+        self.all_arches = pungi.arch.get_valid_arches(
+            self.base_arch, multilib=True, add_noarch=True
+        )
+        self.native_arches = pungi.arch.get_valid_arches(
+            self.base_arch, multilib=False, add_noarch=True
+        )
         self.multilib_arches = pungi.arch.get_valid_multilib_arches(self.base_arch)
         self.source_arches = ["src", "nosrc"]
pungi/gather.py (1105 changed lines): file diff suppressed because it is too large.
@@ -32,7 +32,7 @@ from pungi.util import DEBUG_PATTERNS

 def get_source_name(pkg):
     # Workaround for rhbz#1418298
-    return pkg.sourcerpm.rsplit('-', 2)[0]
+    return pkg.sourcerpm.rsplit("-", 2)[0]


 class GatherOptions(pungi.common.OptionsBase):
@@ -79,21 +79,21 @@ class GatherOptions(pungi.common.OptionsBase):

     def __str__(self):
         lines = [
-            'fulltree=%s' % self.fulltree,
-            'fulltree_excludes=%d items' % len(self.fulltree_excludes),
-            'resolve_deps=%s' % self.resolve_deps,
-            'selfhosting=%s' % self.selfhosting,
-            'greedy_method=%s' % self.greedy_method,
-            'langpacks=%s' % self.langpacks,
-            'multilib_methods=%s' % self.multilib_methods,
-            'multilib_blacklist=%d items' % len(self.multilib_blacklist),
-            'multilib_whitelist=%d items' % len(self.multilib_whitelist),
-            'lookaside_repos=%s' % self.lookaside_repos,
-            'prepopulate=%d items' % len(self.prepopulate),
-            'exclude_source=%s' % self.exclude_source,
-            'exclude_debug=%s' % self.exclude_debug
+            "fulltree=%s" % self.fulltree,
+            "fulltree_excludes=%d items" % len(self.fulltree_excludes),
+            "resolve_deps=%s" % self.resolve_deps,
+            "selfhosting=%s" % self.selfhosting,
+            "greedy_method=%s" % self.greedy_method,
+            "langpacks=%s" % self.langpacks,
+            "multilib_methods=%s" % self.multilib_methods,
+            "multilib_blacklist=%d items" % len(self.multilib_blacklist),
+            "multilib_whitelist=%d items" % len(self.multilib_whitelist),
+            "lookaside_repos=%s" % self.lookaside_repos,
+            "prepopulate=%d items" % len(self.prepopulate),
+            "exclude_source=%s" % self.exclude_source,
+            "exclude_debug=%s" % self.exclude_debug,
         ]
-        return '[\n%s\n]' % '\n'.join(' ' + l for l in lines)
+        return "[\n%s\n]" % "\n".join(" " + l for l in lines)


 class QueryCache(object):
@@ -142,7 +142,9 @@ class GatherBase(object):
         # lookaside.

         # source packages
-        self.q_source_packages = q.filter(arch=self.dnf.arch_wrapper.source_arches).apply()
+        self.q_source_packages = q.filter(
+            arch=self.dnf.arch_wrapper.source_arches
+        ).apply()
         q = q.difference(self.q_source_packages)

         # filter arches
@@ -191,8 +193,12 @@ class Gather(GatherBase):
         if not self.logger.handlers:
             # default logging handler
             handler = logging.StreamHandler()
-            handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s",
-                                                   datefmt="%Y-%m-%d %H:%M:%S"))
+            handler.setFormatter(
+                logging.Formatter(
+                    "%(asctime)s [%(levelname)-8s] %(message)s",
+                    datefmt="%Y-%m-%d %H:%M:%S",
+                )
+            )
             handler.setLevel(logging.DEBUG)
             self.logger.addHandler(handler)

@@ -202,7 +208,8 @@ class Gather(GatherBase):
             self.dnf._sack,
             gather_options.multilib_methods,
             blacklist=self.opts.multilib_blacklist,
-            whitelist=self.opts.multilib_whitelist)
+            whitelist=self.opts.multilib_whitelist,
+        )

         # already processed packages
         self.finished_add_binary_package_deps = {}  # {pkg: [deps]}
@@ -254,11 +261,17 @@ class Gather(GatherBase):
             all_pkgs.append(pkg)

         if not debuginfo:
-            native_pkgs = set(self.q_native_binary_packages.filter(pkg=all_pkgs).apply())
-            multilib_pkgs = set(self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply())
+            native_pkgs = set(
+                self.q_native_binary_packages.filter(pkg=all_pkgs).apply()
+            )
+            multilib_pkgs = set(
+                self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply()
+            )
         else:
             native_pkgs = set(self.q_native_debug_packages.filter(pkg=all_pkgs).apply())
-            multilib_pkgs = set(self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply())
+            multilib_pkgs = set(
+                self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply()
+            )

         result = set()

@@ -307,7 +320,7 @@ class Gather(GatherBase):
             version=pkg.version,
             release=pkg.release,
             arch=pkg.arch,
-            reponame=self.opts.lookaside_repos
+            reponame=self.opts.lookaside_repos,
         )
         return pkg in pkgs

@@ -328,7 +341,7 @@ class Gather(GatherBase):
             # lookaside
             if self.is_from_lookaside(i):
                 self._set_flag(i, PkgFlag.lookaside)
-            if i.sourcerpm.rsplit('-', 2)[0] in self.opts.fulltree_excludes:
+            if i.sourcerpm.rsplit("-", 2)[0] in self.opts.fulltree_excludes:
                 self._set_flag(i, PkgFlag.fulltree_exclude)

     def _get_package_deps(self, pkg, debuginfo=False):
@@ -350,8 +363,8 @@ class Gather(GatherBase):
         # empty.
         requires = (
             pkg.requires
-            + getattr(pkg, 'requires_pre', [])
-            + getattr(pkg, 'requires_post', [])
+            + getattr(pkg, "requires_pre", [])
+            + getattr(pkg, "requires_post", [])
         )

         q = queue.filter(provides=requires).apply()
@@ -378,7 +391,9 @@ class Gather(GatherBase):
         """Given an name of a queue (stored as attribute in `self`), exclude
         all given packages and keep only the latest per package name and arch.
         """
-        setattr(self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply())
+        setattr(
+            self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply()
+        )

     @Profiler("Gather._apply_excludes()")
     def _apply_excludes(self, excludes):
@@ -395,20 +410,22 @@ class Gather(GatherBase):
             with Profiler("Gather._apply_excludes():exclude"):
                 if pattern.endswith(".+"):
                     pkgs = self.q_multilib_binary_packages.filter(
-                        name__glob=pattern[:-2], arch__neq='noarch',
-                        reponame__neq=self.opts.lookaside_repos)
+                        name__glob=pattern[:-2],
+                        arch__neq="noarch",
+                        reponame__neq=self.opts.lookaside_repos,
+                    )
                 elif pattern.endswith(".src"):
                     pkgs = self.q_source_packages.filter(
-                        name__glob=pattern[:-4],
-                        reponame__neq=self.opts.lookaside_repos)
+                        name__glob=pattern[:-4], reponame__neq=self.opts.lookaside_repos
+                    )
                 elif pungi.util.pkg_is_debug(pattern):
                     pkgs = self.q_debug_packages.filter(
-                        name__glob=pattern,
-                        reponame__neq=self.opts.lookaside_repos)
+                        name__glob=pattern, reponame__neq=self.opts.lookaside_repos
+                    )
                 else:
                     pkgs = self.q_binary_packages.filter(
-                        name__glob=pattern,
-                        reponame__neq=self.opts.lookaside_repos)
+                        name__glob=pattern, reponame__neq=self.opts.lookaside_repos
+                    )

                 exclude.update(pkgs)
                 self.logger.debug("EXCLUDED by %s: %s", pattern, [str(p) for p in pkgs])
@@ -417,15 +434,22 @@ class Gather(GatherBase):
         for pattern in self.opts.multilib_blacklist:
             with Profiler("Gather._apply_excludes():exclude-multilib-blacklist"):
                 # TODO: does whitelist affect this in any way?
-                pkgs = self.q_multilib_binary_packages.filter(name__glob=pattern, arch__neq='noarch')
+                pkgs = self.q_multilib_binary_packages.filter(
+                    name__glob=pattern, arch__neq="noarch"
+                )
                 exclude.update(pkgs)
                 self.logger.debug("EXCLUDED by %s: %s", pattern, [str(p) for p in pkgs])
                 self.dnf._sack.add_excludes(pkgs)

-        all_queues = ['q_binary_packages', 'q_native_binary_packages',
-                      'q_multilib_binary_packages', 'q_noarch_binary_packages',
-                      'q_source_packages', 'q_native_debug_packages',
-                      'q_multilib_debug_packages']
+        all_queues = [
+            "q_binary_packages",
+            "q_native_binary_packages",
+            "q_multilib_binary_packages",
+            "q_noarch_binary_packages",
+            "q_source_packages",
+            "q_native_debug_packages",
+            "q_multilib_debug_packages",
+        ]

         with Profiler("Gather._apply_excludes():exclude-queries"):
             for queue in all_queues:
@@ -449,10 +473,14 @@ class Gather(GatherBase):
         for pattern in includes:
             with Profiler("Gather.add_initial_packages():include"):
                 if pattern == "system-release" and self.opts.greedy_method == "all":
-                    pkgs = self.q_binary_packages.filter(provides="system-release").apply()
+                    pkgs = self.q_binary_packages.filter(
+                        provides="system-release"
+                    ).apply()
                 else:
                     if pattern.endswith(".+"):
-                        pkgs = self.q_multilib_binary_packages.filter(name__glob=pattern[:-2]).apply()
+                        pkgs = self.q_multilib_binary_packages.filter(
+                            name__glob=pattern[:-2]
+                        ).apply()
                     else:
                         pkgs = self.q_binary_packages.filter(name__glob=pattern).apply()

@@ -482,19 +510,37 @@ class Gather(GatherBase):
         # Must be executed *after* add_initial_packages() to exclude packages properly.

         # source
-        self.source_pkgs_cache = QueryCache(self.q_source_packages, "name", "version", "release")
+        self.source_pkgs_cache = QueryCache(
+            self.q_source_packages, "name", "version", "release"
+        )

         # debug
-        self.native_debug_packages_cache = QueryCache(self.q_native_debug_packages, "sourcerpm")
-        self.multilib_debug_packages_cache = QueryCache(self.q_multilib_debug_packages, "sourcerpm")
+        self.native_debug_packages_cache = QueryCache(
+            self.q_native_debug_packages, "sourcerpm"
+        )
+        self.multilib_debug_packages_cache = QueryCache(
+            self.q_multilib_debug_packages, "sourcerpm"
+        )

         # packages by sourcerpm
-        self.q_native_pkgs_by_sourcerpm_cache = QueryCache(self.q_native_binary_packages, "sourcerpm", arch__neq="noarch")
-        self.q_multilib_pkgs_by_sourcerpm_cache = QueryCache(self.q_multilib_binary_packages, "sourcerpm", arch__neq="noarch")
-        self.q_noarch_pkgs_by_sourcerpm_cache = QueryCache(self.q_native_binary_packages, "sourcerpm", arch="noarch")
+        self.q_native_pkgs_by_sourcerpm_cache = QueryCache(
+            self.q_native_binary_packages, "sourcerpm", arch__neq="noarch"
+        )
+        self.q_multilib_pkgs_by_sourcerpm_cache = QueryCache(
+            self.q_multilib_binary_packages, "sourcerpm", arch__neq="noarch"
+        )
+        self.q_noarch_pkgs_by_sourcerpm_cache = QueryCache(
+            self.q_native_binary_packages, "sourcerpm", arch="noarch"
+        )

         # multilib
-        self.q_multilib_binary_packages_cache = QueryCache(self.q_multilib_binary_packages, "name", "version", "release", arch__neq="noarch")
+        self.q_multilib_binary_packages_cache = QueryCache(
+            self.q_multilib_binary_packages,
+            "name",
+            "version",
+            "release",
+            arch__neq="noarch",
+        )

         # prepopulate
         self.prepopulate_cache = QueryCache(self.q_binary_packages, "name", "arch")
@ -531,7 +577,9 @@ class Gather(GatherBase):
|
|||||||
deps = self._get_package_deps(pkg)
|
deps = self._get_package_deps(pkg)
|
||||||
for i, req in deps:
|
for i, req in deps:
|
||||||
if i not in self.result_binary_packages:
|
if i not in self.result_binary_packages:
|
||||||
self._add_packages([i], pulled_by=pkg, req=req, reason='binary-dep')
|
self._add_packages(
|
||||||
|
[i], pulled_by=pkg, req=req, reason="binary-dep"
|
||||||
|
)
|
||||||
added.add(i)
|
added.add(i)
|
||||||
self.finished_add_binary_package_deps[pkg] = deps
|
self.finished_add_binary_package_deps[pkg] = deps
|
||||||
|
|
||||||
@ -593,7 +641,7 @@ class Gather(GatherBase):
|
|||||||
|
|
||||||
for i in deps:
|
for i in deps:
|
||||||
if i not in self.result_binary_packages:
|
if i not in self.result_binary_packages:
|
||||||
self._add_packages([i], pulled_by=pkg, reason='cond-dep')
|
self._add_packages([i], pulled_by=pkg, reason="cond-dep")
|
||||||
self._set_flag(pkg, PkgFlag.conditional)
|
self._set_flag(pkg, PkgFlag.conditional)
|
||||||
added.add(i)
|
added.add(i)
|
||||||
|
|
||||||
@ -617,10 +665,14 @@ class Gather(GatherBase):
|
|||||||
deps = self.finished_add_source_package_deps[pkg]
|
deps = self.finished_add_source_package_deps[pkg]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
deps = self._get_package_deps(pkg)
|
deps = self._get_package_deps(pkg)
|
||||||
self.finished_add_source_package_deps[pkg] = set(dep for (dep, req) in deps)
|
self.finished_add_source_package_deps[pkg] = set(
|
||||||
|
dep for (dep, req) in deps
|
||||||
|
)
|
||||||
for i, req in deps:
|
for i, req in deps:
|
||||||
if i not in self.result_binary_packages:
|
if i not in self.result_binary_packages:
|
||||||
self._add_packages([i], pulled_by=pkg, req=req, reason='source-dep')
|
self._add_packages(
|
||||||
|
[i], pulled_by=pkg, req=req, reason="source-dep"
|
||||||
|
)
|
||||||
added.add(i)
|
added.add(i)
|
||||||
self._set_flag(pkg, PkgFlag.self_hosting)
|
self._set_flag(pkg, PkgFlag.self_hosting)
|
||||||
|
|
||||||
@ -658,7 +710,9 @@ class Gather(GatherBase):
|
|||||||
source_pkg = self.sourcerpm_cache.get(pkg.sourcerpm, None)
|
source_pkg = self.sourcerpm_cache.get(pkg.sourcerpm, None)
|
||||||
if source_pkg is None:
|
if source_pkg is None:
|
||||||
nvra = parse_nvra(pkg.sourcerpm)
|
nvra = parse_nvra(pkg.sourcerpm)
|
||||||
source_pkgs = self.source_pkgs_cache.get(nvra["name"], nvra["version"], nvra["release"])
|
source_pkgs = self.source_pkgs_cache.get(
|
||||||
|
nvra["name"], nvra["version"], nvra["release"]
|
||||||
|
)
|
||||||
if source_pkgs:
|
if source_pkgs:
|
||||||
source_pkg = self._get_matching_srpm(pkg, source_pkgs)
|
source_pkg = self._get_matching_srpm(pkg, source_pkgs)
|
||||||
self.sourcerpm_cache[pkg.sourcerpm] = source_pkg
|
self.sourcerpm_cache[pkg.sourcerpm] = source_pkg
|
||||||
@ -667,8 +721,10 @@ class Gather(GatherBase):
|
|||||||
if not source_pkg:
|
if not source_pkg:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if (source_pkg.repoid in self.opts.lookaside_repos
|
if (
|
||||||
or pkg.repoid in self.opts.lookaside_repos):
|
source_pkg.repoid in self.opts.lookaside_repos
|
||||||
|
or pkg.repoid in self.opts.lookaside_repos
|
||||||
|
):
|
||||||
self._set_flag(source_pkg, PkgFlag.lookaside)
|
self._set_flag(source_pkg, PkgFlag.lookaside)
|
||||||
if source_pkg not in self.result_source_packages:
|
if source_pkg not in self.result_source_packages:
|
||||||
added.add(source_pkg)
|
added.add(source_pkg)
|
||||||
@ -741,15 +797,21 @@ class Gather(GatherBase):
|
|||||||
assert pkg is not None
|
assert pkg is not None
|
||||||
|
|
||||||
if get_source_name(pkg) in self.opts.fulltree_excludes:
|
if get_source_name(pkg) in self.opts.fulltree_excludes:
|
||||||
self.logger.debug('No fulltree for %s due to exclude list', pkg)
|
self.logger.debug("No fulltree for %s due to exclude list", pkg)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
fulltree_pkgs = self.finished_add_fulltree_packages[pkg]
|
fulltree_pkgs = self.finished_add_fulltree_packages[pkg]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
native_fulltree_pkgs = self.q_native_pkgs_by_sourcerpm_cache.get(pkg.sourcerpm) or []
|
native_fulltree_pkgs = (
|
||||||
multilib_fulltree_pkgs = self.q_multilib_pkgs_by_sourcerpm_cache.get(pkg.sourcerpm) or []
|
self.q_native_pkgs_by_sourcerpm_cache.get(pkg.sourcerpm) or []
|
||||||
noarch_fulltree_pkgs = self.q_noarch_pkgs_by_sourcerpm_cache.get(pkg.sourcerpm) or []
|
)
|
||||||
|
multilib_fulltree_pkgs = (
|
||||||
|
self.q_multilib_pkgs_by_sourcerpm_cache.get(pkg.sourcerpm) or []
|
||||||
|
)
|
||||||
|
noarch_fulltree_pkgs = (
|
||||||
|
self.q_noarch_pkgs_by_sourcerpm_cache.get(pkg.sourcerpm) or []
|
||||||
|
)
|
||||||
|
|
||||||
if not native_fulltree_pkgs:
|
if not native_fulltree_pkgs:
|
||||||
# no existing native pkgs -> pull multilib
|
# no existing native pkgs -> pull multilib
|
||||||
@ -767,9 +829,9 @@ class Gather(GatherBase):
|
|||||||
# We pull packages determined by `pull_native`, or everything
|
# We pull packages determined by `pull_native`, or everything
|
||||||
# if we're greedy
|
# if we're greedy
|
||||||
fulltree_pkgs = []
|
fulltree_pkgs = []
|
||||||
if pull_native or self.opts.greedy_method == 'all':
|
if pull_native or self.opts.greedy_method == "all":
|
||||||
fulltree_pkgs.extend(native_fulltree_pkgs)
|
fulltree_pkgs.extend(native_fulltree_pkgs)
|
||||||
if not pull_native or self.opts.greedy_method == 'all':
|
if not pull_native or self.opts.greedy_method == "all":
|
||||||
fulltree_pkgs.extend(multilib_fulltree_pkgs)
|
fulltree_pkgs.extend(multilib_fulltree_pkgs)
|
||||||
|
|
||||||
# always pull all noarch subpackages
|
# always pull all noarch subpackages
|
||||||
@ -777,7 +839,7 @@ class Gather(GatherBase):
|
|||||||
|
|
||||||
for i in fulltree_pkgs:
|
for i in fulltree_pkgs:
|
||||||
if i not in self.result_binary_packages:
|
if i not in self.result_binary_packages:
|
||||||
self._add_packages([i], reason='fulltree')
|
self._add_packages([i], reason="fulltree")
|
||||||
self._set_flag(i, PkgFlag.fulltree)
|
self._set_flag(i, PkgFlag.fulltree)
|
||||||
added.add(i)
|
added.add(i)
|
||||||
|
|
||||||
@ -809,15 +871,21 @@ class Gather(GatherBase):
|
|||||||
try:
|
try:
|
||||||
langpack_pkgs = self.finished_add_langpack_packages[pkg]
|
langpack_pkgs = self.finished_add_langpack_packages[pkg]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
patterns = [i["install"] for i in langpack_patterns if i["name"] == pkg.name]
|
patterns = [
|
||||||
|
i["install"] for i in langpack_patterns if i["name"] == pkg.name
|
||||||
|
]
|
||||||
patterns = [i.replace("%s", "*") for i in patterns]
|
patterns = [i.replace("%s", "*") for i in patterns]
|
||||||
|
|
||||||
if not patterns:
|
if not patterns:
|
||||||
self.finished_add_langpack_packages[pkg] = []
|
self.finished_add_langpack_packages[pkg] = []
|
||||||
continue
|
continue
|
||||||
|
|
||||||
langpack_pkgs = self.q_binary_packages.filter(name__glob=patterns).apply()
|
langpack_pkgs = self.q_binary_packages.filter(
|
||||||
langpack_pkgs = langpack_pkgs.filter(name__glob__not=["*-devel", "*-static"])
|
name__glob=patterns
|
||||||
|
).apply()
|
||||||
|
langpack_pkgs = langpack_pkgs.filter(
|
||||||
|
name__glob__not=["*-devel", "*-static"]
|
||||||
|
)
|
||||||
langpack_pkgs = langpack_pkgs.filter(name__neq=exceptions)
|
langpack_pkgs = langpack_pkgs.filter(name__neq=exceptions)
|
||||||
|
|
||||||
pkgs_by_name = {}
|
pkgs_by_name = {}
|
||||||
@ -834,7 +902,7 @@ class Gather(GatherBase):
|
|||||||
langpack_pkgs.add(i)
|
langpack_pkgs.add(i)
|
||||||
self._set_flag(i, PkgFlag.langpack)
|
self._set_flag(i, PkgFlag.langpack)
|
||||||
if i not in self.result_binary_packages:
|
if i not in self.result_binary_packages:
|
||||||
self._add_packages([i], pulled_by=pkg, reason='langpack')
|
self._add_packages([i], pulled_by=pkg, reason="langpack")
|
||||||
added.add(pkg)
|
added.add(pkg)
|
||||||
self.finished_add_langpack_packages[pkg] = langpack_pkgs
|
self.finished_add_langpack_packages[pkg] = langpack_pkgs
|
||||||
|
|
||||||
@ -856,7 +924,9 @@ class Gather(GatherBase):
|
|||||||
self.finished_add_multilib_packages[pkg] = None
|
self.finished_add_multilib_packages[pkg] = None
|
||||||
continue
|
continue
|
||||||
|
|
||||||
pkgs = self.q_multilib_binary_packages_cache.get(pkg.name, pkg.version, pkg.release)
|
pkgs = self.q_multilib_binary_packages_cache.get(
|
||||||
|
pkg.name, pkg.version, pkg.release
|
||||||
|
)
|
||||||
pkgs = self._get_best_package(pkgs)
|
pkgs = self._get_best_package(pkgs)
|
||||||
multilib_pkgs = []
|
multilib_pkgs = []
|
||||||
for i in pkgs:
|
for i in pkgs:
|
||||||
@ -865,7 +935,7 @@ class Gather(GatherBase):
|
|||||||
multilib_pkgs.append(i)
|
multilib_pkgs.append(i)
|
||||||
added.add(i)
|
added.add(i)
|
||||||
self._set_flag(i, PkgFlag.multilib)
|
self._set_flag(i, PkgFlag.multilib)
|
||||||
self._add_packages([i], reason='multilib:%s' % is_multilib)
|
self._add_packages([i], reason="multilib:%s" % is_multilib)
|
||||||
self.finished_add_multilib_packages[pkg] = i
|
self.finished_add_multilib_packages[pkg] = i
|
||||||
# TODO: ^^^ may get multiple results; i686, i586, etc.
|
# TODO: ^^^ may get multiple results; i686, i586, etc.
|
||||||
|
|
||||||
@ -879,45 +949,51 @@ class Gather(GatherBase):
|
|||||||
added = self.add_initial_packages(pattern_list)
|
added = self.add_initial_packages(pattern_list)
|
||||||
self._add_packages(added)
|
self._add_packages(added)
|
||||||
|
|
||||||
added = self.log_count('PREPOPULATE', self.add_prepopulate_packages)
|
added = self.log_count("PREPOPULATE", self.add_prepopulate_packages)
|
||||||
self._add_packages(added, reason='prepopulate')
|
self._add_packages(added, reason="prepopulate")
|
||||||
|
|
||||||
for pass_num in count(1):
|
for pass_num in count(1):
|
||||||
self.logger.debug("PASS %s" % pass_num)
|
self.logger.debug("PASS %s" % pass_num)
|
||||||
|
|
||||||
if self.log_count('CONDITIONAL DEPS', self.add_conditional_packages):
|
if self.log_count("CONDITIONAL DEPS", self.add_conditional_packages):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# resolve deps
|
# resolve deps
|
||||||
if self.log_count('BINARY DEPS', self.add_binary_package_deps):
|
if self.log_count("BINARY DEPS", self.add_binary_package_deps):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.log_count('SOURCE DEPS', self.add_source_package_deps):
|
if self.log_count("SOURCE DEPS", self.add_source_package_deps):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.log_count('SOURCE PACKAGES', self.add_source_packages):
|
if self.log_count("SOURCE PACKAGES", self.add_source_packages):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.log_count('DEBUG PACKAGES', self.add_debug_packages):
|
if self.log_count("DEBUG PACKAGES", self.add_debug_packages):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.log_count("DEBUG DEPS", self.add_debug_package_deps):
|
if self.log_count("DEBUG DEPS", self.add_debug_package_deps):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.log_count('FULLTREE', self.add_fulltree_packages):
|
if self.log_count("FULLTREE", self.add_fulltree_packages):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.log_count('LANGPACKS', self.add_langpack_packages, self.opts.langpacks):
|
if self.log_count(
|
||||||
|
"LANGPACKS", self.add_langpack_packages, self.opts.langpacks
|
||||||
|
):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.log_count('MULTILIB', self.add_multilib_packages):
|
if self.log_count("MULTILIB", self.add_multilib_packages):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# nothing added -> break depsolving cycle
|
# nothing added -> break depsolving cycle
|
||||||
break
|
break
|
||||||
|
|
||||||
def download(self, destdir):
|
def download(self, destdir):
|
||||||
pkglist = (self.result_binary_packages | self.result_debug_packages | self.result_source_packages)
|
pkglist = (
|
||||||
|
self.result_binary_packages
|
||||||
|
| self.result_debug_packages
|
||||||
|
| self.result_source_packages
|
||||||
|
)
|
||||||
self.dnf.download_packages(pkglist)
|
self.dnf.download_packages(pkglist)
|
||||||
linker = Linker(logger=self.logger)
|
linker = Linker(logger=self.logger)
|
||||||
|
|
||||||
@ -937,7 +1013,7 @@ class Gather(GatherBase):
|
|||||||
Print a message, run the function with given arguments and log length
|
Print a message, run the function with given arguments and log length
|
||||||
of result.
|
of result.
|
||||||
"""
|
"""
|
||||||
self.logger.debug('%s', msg)
|
self.logger.debug("%s", msg)
|
||||||
added = method(*args)
|
added = method(*args)
|
||||||
self.logger.debug('ADDED: %s', len(added))
|
self.logger.debug("ADDED: %s", len(added))
|
||||||
return added
|
return added
|
||||||
|
@ -8,6 +8,7 @@ class SimpleAcyclicOrientedGraph(object):
|
|||||||
Graph is constructed by adding oriented edges one by one. It can not contain cycles.
|
Graph is constructed by adding oriented edges one by one. It can not contain cycles.
|
||||||
Main result is spanning line, it determines ordering of the nodes.
|
Main result is spanning line, it determines ordering of the nodes.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self._graph = {}
|
self._graph = {}
|
||||||
self._all_nodes = set()
|
self._all_nodes = set()
|
||||||
@ -18,7 +19,9 @@ class SimpleAcyclicOrientedGraph(object):
|
|||||||
This operation must not create a cycle in the graph.
|
This operation must not create a cycle in the graph.
|
||||||
"""
|
"""
|
||||||
if start == end:
|
if start == end:
|
||||||
raise ValueError("Can not add this kind of edge into graph: %s-%s" % (start, end))
|
raise ValueError(
|
||||||
|
"Can not add this kind of edge into graph: %s-%s" % (start, end)
|
||||||
|
)
|
||||||
self._graph.setdefault(start, [])
|
self._graph.setdefault(start, [])
|
||||||
if end not in self._graph[start]:
|
if end not in self._graph[start]:
|
||||||
self._graph[start].append(end)
|
self._graph[start].append(end)
|
||||||
|
28
pungi/ks.py
28
pungi/ks.py
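A minimal usage sketch for the graph class above, not part of this commit; the import path and the add_edge() name are assumptions, only the self-loop guard comes from the hunk shown here:

from pungi.graph import SimpleAcyclicOrientedGraph

graph = SimpleAcyclicOrientedGraph()
graph.add_edge("base", "devel")  # orient "base" before "devel"
graph.add_edge("devel", "docs")
try:
    graph.add_edge("docs", "docs")  # self-loops are rejected
except ValueError as exc:
    print(exc)  # Can not add this kind of edge into graph: docs-docs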
28 pungi/ks.py
@ -82,7 +82,7 @@ class FulltreeExcludesSection(pykickstart.sections.Section):
        if not self.handler:
            return

        (h, s, t) = line.partition("#")
        line = h.rstrip()

        self.handler.fulltree_excludes.add(line)
@ -95,7 +95,7 @@ class MultilibBlacklistSection(pykickstart.sections.Section):
        if not self.handler:
            return

        (h, s, t) = line.partition("#")
        line = h.rstrip()

        self.handler.multilib_blacklist.add(line)
@ -108,7 +108,7 @@ class MultilibWhitelistSection(pykickstart.sections.Section):
        if not self.handler:
            return

        (h, s, t) = line.partition("#")
        line = h.rstrip()

        self.handler.multilib_whitelist.add(line)
@ -121,7 +121,7 @@ class PrepopulateSection(pykickstart.sections.Section):
        if not self.handler:
            return

        (h, s, t) = line.partition("#")
        line = h.rstrip()

        self.handler.prepopulate.add(line)
@ -154,7 +154,15 @@ class KickstartParser(pykickstart.parser.KickstartParser):
            include_default = True
            include_optional = True

            (
                group_packages,
                group_conditional_packages,
            ) = dnf_obj.comps_wrapper.get_packages_from_group(
                group_id,
                include_default=include_default,
                include_optional=include_optional,
                include_conditional=True,
            )
            packages.update(group_packages)
            for i in group_conditional_packages:
                if i not in conditional_packages:
@ -178,7 +186,15 @@ class KickstartParser(pykickstart.parser.KickstartParser):
            include_default = True
            include_optional = True

            (
                group_packages,
                group_conditional_packages,
            ) = dnf_obj.comps_wrapper.get_packages_from_group(
                group_id,
                include_default=include_default,
                include_optional=include_optional,
                include_conditional=False,
            )
            excluded.update(group_packages)

        return excluded
@ -56,7 +56,9 @@ class LinkerThread(WorkerThread):
        src, dst = item

        if (num % 100 == 0) or (num == self.pool.queue_total):
            self.pool.log_debug(
                "Linked %s out of %s packages" % (num, self.pool.queue_total)
            )

        directory = os.path.dirname(dst)
        makedirs(directory)
@ -113,7 +115,10 @@ class Linker(kobo.log.LoggingBase):
            if os.path.islink(dst) and self._is_same(old_src, dst):
                if os.readlink(dst) != src:
                    raise
                self.log_debug(
                    "The same file already exists, skipping symlink %s -> %s"
                    % (dst, src)
                )
            else:
                raise

@ -134,9 +139,15 @@ class Linker(kobo.log.LoggingBase):
                raise
            if self._is_same(src, dst):
                if not self._is_same_type(src, dst):
                    self.log_error(
                        "File %s already exists but has different type than %s"
                        % (dst, src)
                    )
                    raise
                self.log_debug(
                    "The same file already exists, skipping hardlink %s to %s"
                    % (src, dst)
                )
            else:
                raise

@ -157,9 +168,14 @@ class Linker(kobo.log.LoggingBase):
        if os.path.exists(dst):
            if self._is_same(src, dst):
                if not self._is_same_type(src, dst):
                    self.log_error(
                        "File %s already exists but has different type than %s"
                        % (dst, src)
                    )
                    raise OSError(errno.EEXIST, "File exists")
                self.log_debug(
                    "The same file already exists, skipping copy %s to %s" % (src, dst)
                )
                return
            else:
                raise OSError(errno.EEXIST, "File exists")
@ -174,7 +190,10 @@ class Linker(kobo.log.LoggingBase):
            src_key = (src_stat.st_dev, src_stat.st_ino)
            if src_key in self._inode_map:
                # (st_dev, st_ino) found in the mapping
                self.log_debug(
                    "Harlink detected, hardlinking in destination %s to %s"
                    % (self._inode_map[src_key], dst)
                )
                os.link(self._inode_map[src_key], dst)
                return

@ -61,6 +61,7 @@ class MediaSplitter(object):
    are added; there is no re-ordering. The number of disk is thus not the
    possible minimum.
    """

    def __init__(self, media_size, compose=None, logger=None):
        self.media_size = media_size
        self.files = []  # to preserve order
@ -77,7 +78,9 @@ class MediaSplitter(object):
        old_size = self.file_sizes.get(name, None)

        if old_size is not None and old_size != size:
            raise ValueError(
                "File size mismatch; file: %s; sizes: %s vs %s" % (name, old_size, size)
            )
        if self.media_size and size > self.media_size:
            raise ValueError("File is larger than media size: %s" % name)

@ -32,11 +32,22 @@ def get_description(compose, variant, arch):
        result = compose.conf["release_discinfo_description"]
    elif variant.type == "layered-product":
        # we need to make sure the layered product behaves as it was composed separately
        result = "%s %s for %s %s" % (
            variant.release_name,
            variant.release_version,
            compose.conf["release_name"],
            get_major_version(compose.conf["release_version"]),
        )
    else:
        result = "%s %s" % (
            compose.conf["release_name"],
            compose.conf["release_version"],
        )
        if compose.conf.get("base_product_name", ""):
            result += " for %s %s" % (
                compose.conf["base_product_name"],
                compose.conf["base_product_version"],
            )

    result = result % {"variant_name": variant.name, "arch": arch}
    return result
@ -112,32 +123,122 @@ def compose_to_composeinfo(compose):

        for arch in variant.arches:
            # paths: binaries
            var.paths.os_tree[arch] = relative_path(
                compose.paths.compose.os_tree(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.repository[arch] = relative_path(
                compose.paths.compose.repository(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.packages[arch] = relative_path(
                compose.paths.compose.packages(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            iso_dir = (
                compose.paths.compose.iso_dir(
                    arch=arch, variant=variant, create_dir=False
                )
                or ""
            )
            if iso_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), iso_dir)
            ):
                var.paths.isos[arch] = relative_path(
                    iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            jigdo_dir = (
                compose.paths.compose.jigdo_dir(
                    arch=arch, variant=variant, create_dir=False
                )
                or ""
            )
            if jigdo_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), jigdo_dir)
            ):
                var.paths.jigdos[arch] = relative_path(
                    jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")

            # paths: sources
            var.paths.source_tree[arch] = relative_path(
                compose.paths.compose.os_tree(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.source_repository[arch] = relative_path(
                compose.paths.compose.repository(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.source_packages[arch] = relative_path(
                compose.paths.compose.packages(
                    arch="source", variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            source_iso_dir = (
                compose.paths.compose.iso_dir(
                    arch="source", variant=variant, create_dir=False
                )
                or ""
            )
            if source_iso_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), source_iso_dir)
            ):
                var.paths.source_isos[arch] = relative_path(
                    source_iso_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")
            source_jigdo_dir = (
                compose.paths.compose.jigdo_dir(
                    arch="source", variant=variant, create_dir=False
                )
                or ""
            )
            if source_jigdo_dir and os.path.isdir(
                os.path.join(compose.paths.compose.topdir(), source_jigdo_dir)
            ):
                var.paths.source_jigdos[arch] = relative_path(
                    source_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/"
                ).rstrip("/")

            # paths: debug
            var.paths.debug_tree[arch] = relative_path(
                compose.paths.compose.debug_tree(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.debug_repository[arch] = relative_path(
                compose.paths.compose.debug_repository(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            var.paths.debug_packages[arch] = relative_path(
                compose.paths.compose.debug_packages(
                    arch=arch, variant=variant, create_dir=False
                ).rstrip("/")
                + "/",
                compose.paths.compose.topdir().rstrip("/") + "/",
            ).rstrip("/")
            """
            # XXX: not suported (yet?)
            debug_iso_dir = compose.paths.compose.debug_iso_dir(arch=arch, variant=variant) or ""
            if debug_iso_dir:
@ -145,7 +246,7 @@ def compose_to_composeinfo(compose):
            debug_jigdo_dir = compose.paths.compose.debug_jigdo_dir(arch=arch, variant=variant) or ""
            if debug_jigdo_dir:
                var.debug_jigdo_dir[arch] = relative_path(debug_jigdo_dir, compose.paths.compose.topdir().rstrip("/") + "/").rstrip("/")
            """

        for v in variant.get_variants(recursive=False):
            x = dump_variant(v, parent=variant)
@ -187,17 +288,22 @@ def write_compose_info(compose):


def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
    if variant.type in ("addon",) or variant.is_empty:
        return

    compose.log_debug(
        "on arch '%s' looking at variant '%s' of type '%s'"
        % (arch, variant, variant.type)
    )

    if not timestamp:
        timestamp = int(time.time())
    else:
        timestamp = int(timestamp)

    os_tree = (
        compose.paths.compose.os_tree(arch=arch, variant=variant).rstrip("/") + "/"
    )

    ti = productmd.treeinfo.TreeInfo()
    # load from buildinstall .treeinfo
@ -224,9 +330,13 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
    else:
        # release
        ti.release.name = compose.conf["release_name"]
        ti.release.version = compose.conf.get(
            "treeinfo_version", compose.conf["release_version"]
        )
        ti.release.short = compose.conf["release_short"]
        ti.release.is_layered = (
            True if compose.conf.get("base_product_name", "") else False
        )
        ti.release.type = compose.conf["release_type"].lower()

        # base product
@ -254,8 +364,26 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
    var.name = variant.name
    var.type = variant.type

    var.paths.packages = (
        relative_path(
            compose.paths.compose.packages(
                arch=arch, variant=variant, create_dir=False
            ).rstrip("/")
            + "/",
            os_tree,
        ).rstrip("/")
        or "."
    )
    var.paths.repository = (
        relative_path(
            compose.paths.compose.repository(
                arch=arch, variant=variant, create_dir=False
            ).rstrip("/")
            + "/",
            os_tree,
        ).rstrip("/")
        or "."
    )

    ti.variants.add(var)

@ -270,11 +398,32 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
        addon.uid = i.uid
        addon.name = i.name
        addon.type = i.type
        compose.log_debug(
            "variant '%s' inserting addon uid '%s' type '%s'"
            % (variant, addon.uid, addon.type)
        )

        os_tree = compose.paths.compose.os_tree(arch=arch, variant=i).rstrip("/") + "/"
        addon.paths.packages = (
            relative_path(
                compose.paths.compose.packages(
                    arch=arch, variant=i, create_dir=False
                ).rstrip("/")
                + "/",
                os_tree,
            ).rstrip("/")
            or "."
        )
        addon.paths.repository = (
            relative_path(
                compose.paths.compose.repository(
                    arch=arch, variant=i, create_dir=False
                ).rstrip("/")
                + "/",
                os_tree,
            ).rstrip("/")
            or "."
        )
        var.add(addon)

        repomd_path = os.path.join(addon.paths.repository, "repodata", "repomd.xml")
@ -299,7 +448,7 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
    # clone all but 'general' sections from buildinstall .treeinfo

    bi_dir = compose.paths.work.buildinstall_dir(arch)
    if compose.conf.get("buildinstall_method") == "lorax":
        # The .treeinfo file produced by lorax is nested in variant
        # subdirectory. Legacy buildinstall runs once per arch, so there is
        # only one file.
@ -313,12 +462,16 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
        # stage2 - mainimage
        if bi_ti.stage2.mainimage:
            ti.stage2.mainimage = bi_ti.stage2.mainimage
            ti.checksums.add(
                ti.stage2.mainimage, createrepo_checksum, root_dir=os_tree
            )

        # stage2 - instimage
        if bi_ti.stage2.instimage:
            ti.stage2.instimage = bi_ti.stage2.instimage
            ti.checksums.add(
                ti.stage2.instimage, createrepo_checksum, root_dir=os_tree
            )

        # images
        for platform in bi_ti.images.images:
@ -332,7 +485,9 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
                ti.images.images[platform][image] = path
                ti.checksums.add(path, createrepo_checksum, root_dir=os_tree)

    path = os.path.join(
        compose.paths.compose.os_tree(arch=arch, variant=variant), ".treeinfo"
    )
    compose.log_info("Writing treeinfo: %s" % path)
    ti.dump(path)

@ -365,6 +520,8 @@ def populate_extra_files_metadata(
        copied_file = os.path.relpath(full_path, relative_root)
        metadata.add(variant.uid, arch, copied_file, size, checksums)

    strip_prefix = (
        (os.path.relpath(topdir, relative_root) + "/") if relative_root else ""
    )
    with open(os.path.join(topdir, "extra_files.json"), "w") as f:
        metadata.dump_for_tree(f, variant.uid, arch, strip_prefix)
@ -26,16 +26,17 @@ class Multilib(object):
    method that accepts a DNF sach and an iterable of globs that will be used
    to find package names.
    """

    def __init__(self, methods, blacklist, whitelist):
        self.methods = {}
        self.blacklist = blacklist
        self.whitelist = whitelist

        self.all_methods = {
            "none": multilib.NoMultilibMethod(None),
            "all": multilib.AllMultilibMethod(None),
            "devel": multilib.DevelMultilibMethod(None),
            "runtime": multilib.RuntimeMultilibMethod(None),
        }

        for method in methods:
@ -44,15 +45,17 @@ class Multilib(object):
    @classmethod
    def from_globs(cls, sack, methods, blacklist=None, whitelist=None):
        """Create a Multilib instance with expanded blacklist and whitelist."""
        return cls(
            methods,
            _expand_list(sack, blacklist or []),
            _expand_list(sack, whitelist or []),
        )

    def is_multilib(self, pkg):
        if pkg.name in self.blacklist:
            return False
        if pkg.name in self.whitelist:
            return "whitelist"
        for method, cls in self.methods.items():
            if cls.select(pkg):
                return method
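A hedged sketch of driving the Multilib helper above from a DNF sack, not part of this commit; the import paths and repo setup are assumptions, the from_globs() and is_multilib() signatures come from the hunks:

import dnf

from pungi.multilib_dnf import Multilib

base = dnf.Base()
base.read_all_repos()
base.fill_sack(load_system_repo=False)

multilib = Multilib.from_globs(
    base.sack,
    ["devel", "runtime"],
    blacklist=["glibc32*"],  # hypothetical globs
    whitelist=["wine*"],
)

for pkg in base.sack.query().filter(arch="i686"):
    method = multilib.is_multilib(pkg)  # False, "whitelist", or a method name
    if method:
        print(pkg, "is multilib via", method)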
@ -22,7 +22,9 @@ import pungi.util


LINE_PATTERN_RE = re.compile(r"^\s*(?P<line>[^#]+)(:?\s+(?P<comment>#.*))?$")
RUNTIME_PATTERN_SPLIT_RE = re.compile(
    r"^\s*(?P<path>[^\s]+)\s+(?P<pattern>[^\s]+)(:?\s+(?P<comment>#.*))?$"
)
SONAME_PATTERN_RE = re.compile(r"^(.+\.so\.[a-zA-Z0-9_\.]+).*$")


@ -86,6 +88,7 @@ def expand_runtime_patterns(patterns):

class MultilibMethodBase(object):
    """a base class for multilib methods"""

    name = "base"

    def __init__(self, config_path):
@ -95,7 +98,11 @@ class MultilibMethodBase(object):
        raise NotImplementedError

    def skip(self, po):
        if (
            pungi.gather.is_noarch(po)
            or pungi.gather.is_source(po)
            or pungi.util.pkg_is_debug(po)
        ):
            return True
        return False

@ -120,6 +127,7 @@ class MultilibMethodBase(object):

class NoneMultilibMethod(MultilibMethodBase):
    """multilib disabled"""

    name = "none"

    def select(self, po):
@ -128,6 +136,7 @@ class NoneMultilibMethod(MultilibMethodBase):

class AllMultilibMethod(MultilibMethodBase):
    """all packages are multilib"""

    name = "all"

    def select(self, po):
@ -138,13 +147,20 @@ class AllMultilibMethod(MultilibMethodBase):

class RuntimeMultilibMethod(MultilibMethodBase):
    """pre-defined paths to libs"""

    name = "runtime"

    def __init__(self, *args, **kwargs):
        super(RuntimeMultilibMethod, self).__init__(*args, **kwargs)
        self.blacklist = read_lines_from_file(
            self.config_path + "runtime-blacklist.conf"
        )
        self.whitelist = read_lines_from_file(
            self.config_path + "runtime-whitelist.conf"
        )
        self.patterns = expand_runtime_patterns(
            read_runtime_patterns_from_file(self.config_path + "runtime-patterns.conf")
        )

    def select(self, po):
        if self.skip(po):
@ -186,6 +202,7 @@ class RuntimeMultilibMethod(MultilibMethodBase):

class KernelMultilibMethod(MultilibMethodBase):
    """kernel and kernel-devel"""

    name = "kernel"

    def __init__(self, *args, **kwargs):
@ -199,6 +216,7 @@ class KernelMultilibMethod(MultilibMethodBase):

class YabootMultilibMethod(MultilibMethodBase):
    """yaboot on ppc"""

    name = "yaboot"

    def __init__(self, *args, **kwargs):
@ -213,12 +231,13 @@ class YabootMultilibMethod(MultilibMethodBase):

class DevelMultilibMethod(MultilibMethodBase):
    """all -devel and -static packages"""

    name = "devel"

    def __init__(self, *args, **kwargs):
        super(DevelMultilibMethod, self).__init__(*args, **kwargs)
        self.blacklist = read_lines_from_file(self.config_path + "devel-blacklist.conf")
        self.whitelist = read_lines_from_file(self.config_path + "devel-whitelist.conf")

    def select(self, po):
        if self.skip(po):
@ -254,8 +273,14 @@ def init(config_path="/usr/share/pungi/multilib/"):
    if not config_path.endswith("/"):
        config_path += "/"

    for cls in (
        AllMultilibMethod,
        DevelMultilibMethod,
        KernelMultilibMethod,
        NoneMultilibMethod,
        RuntimeMultilibMethod,
        YabootMultilibMethod,
    ):
        method = cls(config_path)
        METHOD_MAP[method.name] = method

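A short sketch of initializing the yum-based multilib methods above, not part of this commit; the module path is an assumption, while init(), METHOD_MAP and the method names all appear in the hunks:

from pungi import multilib_yum as multilib

multilib.init("/usr/share/pungi/multilib/")
print(sorted(multilib.METHOD_MAP))  # ['all', 'devel', 'kernel', 'none', 'runtime', 'yaboot']
runtime = multilib.METHOD_MAP["runtime"]  # driven by the runtime-*.conf lists read above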
@ -29,6 +29,7 @@ class PungiNotifier(object):
    script fails, a warning will be logged, but the compose process will not be
    interrupted.
    """

    def __init__(self, cmds):
        self.cmds = cmds
        self.lock = threading.Lock()
@ -38,30 +39,31 @@ class PungiNotifier(object):
        """Add compose related information to the data."""
        if not self.compose:
            return
        data.setdefault("compose_id", self.compose.compose_id)

        # Publish where in the world this compose will end up living
        location = pungi.util.translate_path(
            self.compose, self.compose.paths.compose.topdir()
        )
        data.setdefault("location", location)

        # Add information about the compose itself.
        data.setdefault("compose_date", self.compose.compose_date)
        data.setdefault("compose_type", self.compose.compose_type)
        data.setdefault("compose_respin", self.compose.compose_respin)
        data.setdefault("compose_label", self.compose.compose_label)
        data.setdefault("release_short", self.compose.conf["release_short"])
        data.setdefault("release_name", self.compose.conf["release_name"])
        data.setdefault("release_version", self.compose.conf["release_version"])
        data.setdefault("release_type", self.compose.conf["release_type"].lower())
        data.setdefault("release_is_layered", False)

        if self.compose.conf.get("base_product_name", ""):
            data["release_is_layered"] = True
            data["base_product_name"] = self.compose.conf["base_product_name"]
            data["base_product_version"] = self.compose.conf["base_product_version"]
            data["base_product_short"] = self.compose.conf["base_product_short"]
            data["base_product_type"] = self.compose.conf["base_product_type"].lower()

    def send(self, msg, workdir=None, **kwargs):
        """Send a message.
@ -89,23 +91,24 @@ class PungiNotifier(object):
        """Run a single notification script with proper logging."""
        logfile = None
        if self.compose:
            self.compose.log_debug("Notification: %r %r, %r" % (cmd, msg, kwargs))
            logfile = os.path.join(
                self.compose.paths.log.topdir(),
                "notifications",
                "notification-%s.log" % datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S"),
            )
            pungi.util.makedirs(os.path.dirname(logfile))

        ret, _ = shortcuts.run(
            (cmd, msg),
            stdin_data=json.dumps(kwargs),
            can_fail=True,
            workdir=workdir,
            return_stdout=False,
            show_cmd=True,
            universal_newlines=True,
            logfile=logfile,
        )
        if ret != 0:
            if self.compose:
                self.compose.log_warning("Failed to invoke notification script.")
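A rough sketch of wiring up the notifier above, not part of this commit; the import path and the script name are assumptions, while the constructor, the send() signature and the JSON-on-stdin behaviour come from the hunks:

from pungi.notifier import PungiNotifier

# Each command is run as (cmd, msg) with the extra keyword arguments serialized
# to JSON on stdin; a failing script only logs a warning and never aborts the compose.
notifier = PungiNotifier(["/usr/local/bin/compose-notify.sh"])  # hypothetical script
notifier.send("phase-start", phase_name="gather")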
@@ -26,62 +26,128 @@ def main(args=None):
    subparser = parser.add_subparsers(help="Sub commands")

    treep = subparser.add_parser("tree", help="Compose OSTree repository")
    treep.set_defaults(_class=Tree, func="run")
    treep.add_argument(
        "--repo",
        metavar="PATH",
        required=True,
        help="where to put the OSTree repo (required)",
    )
    treep.add_argument(
        "--treefile",
        metavar="FILE",
        required=True,
        help="treefile for rpm-ostree (required)",
    )
    treep.add_argument(
        "--log-dir",
        metavar="DIR",
        required=True,
        help="where to log output and commitid (required). \
             Note: commitid file will be written to this dir",
    )
    treep.add_argument(
        "--extra-config", metavar="FILE", help="JSON file contains extra configurations"
    )
    treep.add_argument(
        "--version",
        metavar="VERSION",
        help="version string to be added as versioning metadata",
    )
    treep.add_argument(
        "--update-summary", action="store_true", help="update summary metadata"
    )
    treep.add_argument(
        "--ostree-ref", metavar="PATH", help="override ref value from treefile"
    )
    treep.add_argument(
        "--force-new-commit",
        action="store_true",
        help="do not use rpm-ostree's built-in change detection",
    )

    installerp = subparser.add_parser(
        "installer", help="Create an OSTree installer image"
    )
    installerp.set_defaults(_class=Installer, func="run")
    installerp.add_argument(
        "-p",
        "--product",
        metavar="PRODUCT",
        required=True,
        help="product name (required)",
    )
    installerp.add_argument(
        "-v",
        "--version",
        metavar="VERSION",
        required=True,
        help="version identifier (required)",
    )
    installerp.add_argument(
        "-r",
        "--release",
        metavar="RELEASE",
        required=True,
        help="release information (required)",
    )
    installerp.add_argument(
        "-s",
        "--source",
        metavar="REPOSITORY",
        required=True,
        action="append",
        help="source repository (required)",
    )
    installerp.add_argument(
        "-o",
        "--output",
        metavar="DIR",
        required=True,
        help="path to image output directory (required)",
    )
    installerp.add_argument("--log-dir", metavar="DIR", help="path to log directory")
    installerp.add_argument("--volid", metavar="VOLID", help="volume id")
    installerp.add_argument("--variant", metavar="VARIANT", help="variant name")
    installerp.add_argument("--rootfs-size", metavar="SIZE")
    installerp.add_argument("--nomacboot", action="store_true", default=False)
    installerp.add_argument("--noupgrade", action="store_true", default=False)
    installerp.add_argument("--isfinal", action="store_true", default=False)

    installerp.add_argument(
        "--installpkgs",
        metavar="PACKAGE",
        action="append",
        help="package glob to install before runtime-install.tmpl",
    )
    installerp.add_argument(
        "--add-template",
        metavar="FILE",
        action="append",
        help="Additional template for runtime image",
    )
    installerp.add_argument(
        "--add-template-var",
        metavar="ADD_TEMPLATE_VARS",
        action="append",
        help="Set variable for runtime image template",
    )
    installerp.add_argument(
        "--add-arch-template",
        metavar="FILE",
        action="append",
        help="Additional template for architecture-specific image",
    )
    installerp.add_argument(
        "--add-arch-template-var",
        metavar="ADD_ARCH_TEMPLATE_VARS",
        action="append",
        help="Set variable for architecture-specific image",
    )

    installerp.add_argument(
        "--extra-config", metavar="FILE", help="JSON file contains extra configurations"
    )

    args = parser.parse_args(args)
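For orientation, the black-formatted parser above still accepts the same options. A hypothetical invocation of the tree subcommand follows; the import path is assumed, all paths and the version string are invented, and calling main() really would run rpm-ostree, so this is illustrative only:

# Illustrative only: drives the "tree" subcommand with made-up arguments.
from pungi.ostree.__main__ import main  # import path assumed

main([
    "tree",
    "--repo", "/srv/ostree/repo",
    "--treefile", "/srv/config/fedora-atomic-host.json",
    "--log-dir", "/tmp/ostree-logs",
    "--version", "29.20181025.0",
    "--update-summary",
])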
@@ -23,7 +23,7 @@ from ..wrappers import lorax


class Installer(OSTree):
    def _merge_config(self, config):
        self.installpkgs.extend(config.get("installpkgs", []))
        self.add_template.extend(config.get("add_template", []))
        self.add_template_var.extend(config.get("add_template_var"))
        self.add_arch_template.extend(config.get("add_arch_template", []))
@@ -52,7 +52,7 @@ class Installer(OSTree):

        self.extra_config = self.args.extra_config
        if self.extra_config:
            self.extra_config = json.load(open(self.extra_config, "r"))
            self._merge_config(self.extra_config)

        lorax_wrapper = lorax.LoraxWrapper()
@@ -72,6 +72,6 @@ class Installer(OSTree):
            add_arch_template_var=self.add_arch_template_var,
            rootfs_size=self.rootfs_size,
            is_final=self.isfinal,
            log_dir=self.logdir,
        )
        shortcuts.run(cmd)
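The --extra-config file that feeds _merge_config above is plain JSON. A minimal, invented example of its contents, written here as the equivalent Python dict; note that _merge_config calls config.get("add_template_var") without a default, so the key is included:

# Hypothetical extra-config payload; keys mirror the ones _merge_config() reads.
extra_config = {
    "installpkgs": ["shim", "grub2-efi"],
    "add_template": ["/usr/share/lorax/custom.tmpl"],
    "add_template_var": ["ostree_osname=fedora-atomic"],
    "add_arch_template": [],
}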
@@ -20,14 +20,18 @@ from kobo import shortcuts

from pungi.util import makedirs
from .base import OSTree
from .utils import (
    make_log_file,
    tweak_treeconf,
    get_ref_from_treefile,
    get_commitid_from_commitid_file,
)


class Tree(OSTree):
    def _make_tree(self):
        """Compose OSTree tree"""
        log_file = make_log_file(self.logdir, "create-ostree-repo")
        cmd = [
            "rpm-ostree",
            "compose",
@@ -41,11 +45,11 @@ class Tree(OSTree):
        ]
        if self.version:
            # Add versioning metadata
            cmd.append("--add-metadata-string=version=%s" % self.version)
        # Note renamed from rpm-ostree --force-nocache since it's a better
        # name; more clearly describes what we're doing here.
        if self.force_new_commit:
            cmd.append("--force-nocache")
        cmd.append(self.treefile)

        shortcuts.run(
@@ -54,9 +58,9 @@ class Tree(OSTree):

    def _update_summary(self):
        """Update summary metadata"""
        log_file = make_log_file(self.logdir, "ostree-summary")
        shortcuts.run(
            ["ostree", "summary", "-u", "--repo=%s" % self.repo],
            show_cmd=True,
            stdout=True,
            logfile=log_file,
@@ -73,24 +77,24 @@ class Tree(OSTree):
        """
        tag_ref = True
        if self.extra_config:
            tag_ref = self.extra_config.get("tag_ref", True)
        if not tag_ref:
            print("Not updating ref as configured")
            return
        ref = get_ref_from_treefile(self.treefile)
        commitid = get_commitid_from_commitid_file(self.commitid_file)
        print("Ref: %r, Commit ID: %r" % (ref, commitid))
        if ref and commitid:
            print("Updating ref")
            # Let's write the tag out ourselves
            heads_dir = os.path.join(self.repo, "refs", "heads")
            if not os.path.exists(heads_dir):
                raise RuntimeError("Refs/heads did not exist in ostree repo")

            ref_path = os.path.join(heads_dir, ref)
            makedirs(os.path.dirname(ref_path))
            with open(ref_path, "w") as f:
                f.write(commitid + "\n")

    def run(self):
        self.repo = self.args.repo
@@ -104,9 +108,11 @@ class Tree(OSTree):

        if self.extra_config or self.ostree_ref:
            if self.extra_config:
                self.extra_config = json.load(open(self.extra_config, "r"))
                repos = self.extra_config.get("repo", [])
                keep_original_sources = self.extra_config.get(
                    "keep_original_sources", False
                )
            else:
                # missing extra_config mustn't affect tweak_treeconf call
                repos = []
@@ -115,16 +121,16 @@ class Tree(OSTree):
            update_dict = {}
            if self.ostree_ref:
                # override ref value in treefile
                update_dict["ref"] = self.ostree_ref

            self.treefile = tweak_treeconf(
                self.treefile,
                source_repos=repos,
                keep_original_sources=keep_original_sources,
                update_dict=update_dict,
            )

        self.commitid_file = make_log_file(self.logdir, "commitid")

        self._make_tree()
        self._update_ref()
@@ -30,7 +30,7 @@ def make_log_file(log_dir, filename):
    if not log_dir:
        return None
    makedirs(log_dir)
    return os.path.join(log_dir, "%s.log" % filename)


def get_ref_from_treefile(treefile, arch=None, logger=None):
@@ -40,7 +40,7 @@ def get_ref_from_treefile(treefile, arch=None, logger=None):
    """
    logger = logger or logging.getLogger(__name__)
    if os.path.isfile(treefile):
        with open(treefile, "r") as f:
            try:
                # rpm-ostree now supports YAML
                # https://github.com/projectatomic/rpm-ostree/pull/1377
@@ -50,9 +50,9 @@ def get_ref_from_treefile(treefile, arch=None, logger=None):
                    parsed = json.load(f)
                return parsed["ref"].replace("${basearch}", getBaseArch(arch))
            except Exception as e:
                logger.error("Unable to get ref from treefile: %s" % e)
    else:
        logger.error("Unable to open treefile")
    return None

@@ -61,11 +61,13 @@ def get_commitid_from_commitid_file(commitid_file):
    if not os.path.exists(commitid_file + ".stamp"):
        # The stamp does not exist, so no new commit.
        return None
    with open(commitid_file, "r") as f:
        return f.read().replace("\n", "")


def tweak_treeconf(
    treeconf, source_repos=None, keep_original_sources=False, update_dict=None
):
    """
    Update tree config file by adding new repos, and remove existing repos
    from the tree config file if 'keep_original_sources' is not enabled.
@@ -74,51 +76,51 @@ def tweak_treeconf(treeconf, source_repos=None, keep_original_sources=False, upd
    """

    # backup the old tree config
    shutil.copy2(treeconf, "{0}.bak".format(treeconf))

    treeconf_dir = os.path.dirname(treeconf)
    with open(treeconf, "r") as f:
        # rpm-ostree now supports YAML, but we'll end up converting it to JSON.
        # https://github.com/projectatomic/rpm-ostree/pull/1377
        if treeconf.endswith(".yaml"):
            treeconf_content = yaml.safe_load(f)
            treeconf = treeconf.replace(".yaml", ".json")
        else:
            treeconf_content = json.load(f)

    repos = []
    if source_repos:
        # Sort to ensure reliable ordering
        source_repos = sorted(source_repos, key=lambda x: x["name"])
        # Now, since pungi includes timestamps in the repo names which
        # currently defeats rpm-ostree's change detection, let's just
        # use repos named 'repo-<number>'.
        # https://pagure.io/pungi/issue/811
        with open("{0}/pungi.repo".format(treeconf_dir), "w") as f:
            for i, repo in enumerate(source_repos):
                name = "repo-{0}".format(i)
                f.write("[%s]\n" % name)
                f.write("name=%s\n" % name)
                f.write("baseurl=%s\n" % repo["baseurl"])
                exclude = repo.get("exclude", None)
                if exclude:
                    f.write("exclude=%s\n" % exclude)
                gpgcheck = "1" if repo.get("gpgcheck", False) else "0"
                f.write("gpgcheck=%s\n" % gpgcheck)

                repos.append(name)

    original_repos = treeconf_content.get("repos", [])
    if keep_original_sources:
        treeconf_content["repos"] = original_repos + repos
    else:
        treeconf_content["repos"] = repos

    # update content with config values from dictionary (for example 'ref')
    if isinstance(update_dict, dict):
        treeconf_content.update(update_dict)

    # update tree config to add new repos
    with open(treeconf, "w") as f:
        json.dump(treeconf_content, f, indent=4)
    return treeconf
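To make the tweak_treeconf behaviour concrete: it backs up the original tree config, writes a pungi.repo with sections named repo-0, repo-1, ... next to it, and rewrites the config as JSON with the new repos list plus any update_dict overrides. A hedged usage sketch, with an invented treefile path, baseurl and ref:

# Illustrative call only; the path, baseurl and ref below are placeholders.
from pungi.ostree.utils import tweak_treeconf

new_treefile = tweak_treeconf(
    "/tmp/compose/fedora-atomic-host.json",
    source_repos=[
        {"name": "Everything-20190101", "baseurl": "http://example.com/x86_64/os"},
    ],
    keep_original_sources=False,
    update_dict={"ref": "fedora/29/x86_64/atomic-host"},
)
# new_treefile points at the rewritten JSON whose "repos" entry is ["repo-0"]
# and whose "ref" has been overridden.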
190 pungi/paths.py
@@ -14,9 +14,7 @@
# along with this program; if not, see <https://gnu.org/licenses/>.


__all__ = ("Paths",)


import errno
@@ -30,7 +28,12 @@ class Paths(object):
        paths_module_name = compose.conf.get("paths_module")
        if paths_module_name:
            # custom paths
            paths_module = __import__(
                paths_module_name,
                globals(),
                locals(),
                ["LogPaths", "WorkPaths", "ComposePaths"],
            )
            self.compose = paths_module.ComposePaths(compose)
            self.log = paths_module.LogPaths(compose)
            self.work = paths_module.WorkPaths(compose)
@@ -62,7 +65,9 @@ class LogPaths(object):
        arch = arch or "global"
        if log_name.endswith(".log"):
            log_name = log_name[:-4]
        return os.path.join(
            self.topdir(arch, create_dir=create_dir), "%s.%s.log" % (log_name, arch)
        )


class WorkPaths(object):
@@ -114,13 +119,13 @@ class WorkPaths(object):
            work/x86_64/pungi/Server.x86_64.conf
        """
        arch = arch or "global"
        file_name = ""
        if variant:
            file_name += variant.uid + "."
        file_name += arch + "."
        if source_name:
            file_name += source_name + "."
        file_name += "conf"
        path = os.path.join(self.topdir(arch, create_dir=create_dir), "pungi")
        if create_dir:
            makedirs(path)
@@ -147,7 +152,7 @@ class WorkPaths(object):
        path = self.pungi_conf(arch, variant, create_dir=create_dir)
        path = path[:-5]
        if source_name:
            path += "." + source_name
        return path + ".log"

    def pungi_cache_dir(self, arch, variant=None, create_dir=True):
@@ -200,13 +205,16 @@ class WorkPaths(object):
        Examples:
            work/x86_64/Server/lookaside_repo
        """
        path = os.path.join(
            self.topdir(arch, create_dir=create_dir), variant.uid, "lookaside_repo"
        )
        if create_dir:
            makedirs(path)
        return path

    def package_list(
        self, arch=None, variant=None, pkgset=None, pkg_type=None, create_dir=True
    ):
        """
        Examples:
            work/x86_64/package_list/x86_64.conf
@@ -234,7 +242,9 @@ class WorkPaths(object):
        Examples:
            work/x86_64/package_list/Server.x86_64.lookaside.conf
        """
        return self.package_list(
            arch, variant, pkg_type="lookaside", create_dir=create_dir
        )

    def pungi_download_dir(self, arch, create_dir=True):
        """
@@ -246,8 +256,9 @@ class WorkPaths(object):
            makedirs(path)
        return path

    def buildinstall_dir(
        self, arch, create_dir=True, allow_topdir_override=False, variant=None
    ):
        """
        :param bool allow_topdir_override: When True, the
            "buildinstall_topdir" will be used (if set) instead of real
@@ -262,9 +273,12 @@ class WorkPaths(object):
        if allow_topdir_override and buildinstall_topdir:
            topdir_basename = os.path.basename(self.compose.topdir)
            path = os.path.join(
                buildinstall_topdir, "buildinstall-%s" % topdir_basename, arch
            )
        else:
            path = os.path.join(
                self.topdir(arch, create_dir=create_dir), "buildinstall"
            )

        if variant:
            path = os.path.join(path, variant.uid)
@@ -277,7 +291,9 @@ class WorkPaths(object):
        """
        if arch == "global":
            raise RuntimeError("Global extra files dir makes no sense.")
        path = os.path.join(
            self.topdir(arch, create_dir=create_dir), variant.uid, "extra-files"
        )
        if create_dir:
            makedirs(path)
        return path
@@ -289,7 +305,11 @@ class WorkPaths(object):
        """
        if arch == "global":
            raise RuntimeError("Global extra files dir makes no sense.")
        path = os.path.join(
            self.topdir(arch, create_dir=create_dir),
            variant.uid,
            "extra-iso-extra-files",
        )
        if create_dir:
            makedirs(path)
        return path
@@ -303,7 +323,7 @@ class WorkPaths(object):
            self.topdir(arch, create_dir=create_dir),
            variant.uid,
            "iso-staging-dir",
            filename,
        )
        if create_dir:
            makedirs(path)
@@ -318,7 +338,9 @@ class WorkPaths(object):
        if pkg_type is not None:
            file_name += ".%s" % pkg_type
        file_name += ".conf"
        path = os.path.join(
            self.topdir(arch, create_dir=create_dir), "repo_package_list"
        )
        if create_dir:
            makedirs(path)
        path = os.path.join(path, file_name)
@@ -357,7 +379,11 @@ class WorkPaths(object):
        # file_name = "%s.%s.pem" % (variant, arch)
        # HACK: modifyrepo doesn't handle renames -> $dir/productid
        file_name = "productid"
        path = os.path.join(
            self.topdir(arch, create_dir=create_dir),
            "product_id",
            "%s.%s.pem" % (variant, arch),
        )
        if create_dir:
            makedirs(path)
        path = os.path.join(path, file_name)
@@ -371,12 +397,16 @@ class WorkPaths(object):
        Examples:
            work/image-build/Server
        """
        path = os.path.join(
            self.topdir("image-build", create_dir=create_dir), variant.uid
        )
        if create_dir:
            makedirs(path)
        return path

    def image_build_conf(
        self, variant, image_name, image_type, arches=None, create_dir=True
    ):
        """
        @param variant
        @param image-name
@@ -389,16 +419,18 @@ class WorkPaths(object):
            work/image-build/Server/docker_rhel-server-docker_x86_64.cfg
            work/image-build/Server/docker_rhel-server-docker_x86_64-ppc64le.cfg
        """
        path = os.path.join(
            self.image_build_dir(variant), "%s_%s" % (image_type, image_name)
        )
        if arches is not None:
            path = "%s_%s" % (path, "-".join(list(arches)))
        path = "%s.cfg" % path
        return path

    def module_defaults_dir(self, create_dir=True):
        """
        """
        path = os.path.join(self.topdir(create_dir=create_dir), "module_defaults")
        if create_dir:
            makedirs(path)
        return path
@@ -432,7 +464,9 @@ class ComposePaths(object):

        if arch or variant:
            if variant.type == "addon":
                return self.topdir(
                    arch, variant.parent, create_dir=create_dir, relative=relative
                )
            path = os.path.join(path, variant.uid, arch)
        if create_dir and not relative:
            makedirs(path)
@@ -453,7 +487,10 @@ class ComposePaths(object):
            # use 'os' dir due to historical reasons
            tree_dir = "os"

        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative),
            tree_dir,
        )
        if create_dir and not relative:
            makedirs(path)
        return path
@@ -468,9 +505,13 @@ class ComposePaths(object):
            compose/Server/x86_64/addons/LoadBalancer
        """
        if variant.type == "addon":
            path = self.packages(
                arch, variant, create_dir=create_dir, relative=relative
            )
        else:
            path = self.tree_dir(
                arch, variant, create_dir=create_dir, relative=relative
            )
        if create_dir and not relative:
            makedirs(path)
        return path
@@ -483,9 +524,16 @@ class ComposePaths(object):
            compose/Server-optional/x86_64/os/Packages
        """
        if variant.type == "addon":
            path = os.path.join(
                self.tree_dir(arch, variant, create_dir=create_dir, relative=relative),
                "addons",
                variant.id,
            )
        else:
            path = os.path.join(
                self.tree_dir(arch, variant, create_dir=create_dir, relative=relative),
                "Packages",
            )
        if create_dir and not relative:
            makedirs(path)
        return path
@@ -496,7 +544,10 @@ class ComposePaths(object):
            compose/Server/x86_64/debug
            compose/Server-optional/x86_64/debug
        """
        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative),
            "debug",
        )
        if create_dir and not relative:
            makedirs(path)
        return path
@@ -507,7 +558,10 @@ class ComposePaths(object):
            compose/Server/x86_64/debug/tree
            compose/Server-optional/x86_64/debug/tree
        """
        path = os.path.join(
            self.debug_topdir(arch, variant, create_dir=create_dir, relative=relative),
            "tree",
        )
        if create_dir and not relative:
            makedirs(path)
        return path
@@ -522,9 +576,20 @@ class ComposePaths(object):
        if arch in ("source", "src"):
            return None
        if variant.type == "addon":
            path = os.path.join(
                self.debug_tree(
                    arch, variant, create_dir=create_dir, relative=relative
                ),
                "addons",
                variant.id,
            )
        else:
            path = os.path.join(
                self.debug_tree(
                    arch, variant, create_dir=create_dir, relative=relative
                ),
                "Packages",
            )
        if create_dir and not relative:
            makedirs(path)
        return path
@@ -539,9 +604,17 @@ class ComposePaths(object):
        if arch in ("source", "src"):
            return None
        if variant.type == "addon":
            path = os.path.join(
                self.debug_tree(
                    arch, variant, create_dir=create_dir, relative=relative
                ),
                "addons",
                variant.id,
            )
        else:
            path = self.debug_tree(
                arch, variant, create_dir=create_dir, relative=relative
            )
        if create_dir and not relative:
            makedirs(path)
        return path
@@ -559,12 +632,14 @@ class ComposePaths(object):
            return None
        if arch == "src":
            arch = "source"
        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative), "iso"
        )

        if symlink_to:
            # TODO: create_dir
            topdir = self.compose.topdir.rstrip("/") + "/"
            relative_dir = path[len(topdir) :]
            target_dir = os.path.join(symlink_to, self.compose.compose_id, relative_dir)
            if create_dir and not relative:
                makedirs(target_dir)
@@ -583,13 +658,21 @@ class ComposePaths(object):
            makedirs(path)
        return path

    def iso_path(
        self, arch, variant, filename, symlink_to=None, create_dir=True, relative=False
    ):
        """
        Examples:
            compose/Server/x86_64/iso/rhel-7.0-20120127.0-Server-x86_64-dvd1.iso
            None
        """
        path = self.iso_dir(
            arch,
            variant,
            symlink_to=symlink_to,
            create_dir=create_dir,
            relative=relative,
        )
        if path is None:
            return None

@@ -605,11 +688,13 @@ class ComposePaths(object):
        @param symlink_to=None
        @param relative=False
        """
        path = os.path.join(
            self.topdir("%(arch)s", variant, create_dir=False, relative=relative),
            "images",
        )
        if symlink_to:
            topdir = self.compose.topdir.rstrip("/") + "/"
            relative_dir = path[len(topdir) :]
            target_dir = os.path.join(symlink_to, self.compose.compose_id, relative_dir)
            try:
                os.symlink(target_dir, path)
@@ -636,7 +721,10 @@ class ComposePaths(object):
            return None
        if arch == "src":
            arch = "source"
        path = os.path.join(
            self.topdir(arch, variant, create_dir=create_dir, relative=relative),
            "jigdo",
        )

        if create_dir and not relative:
            makedirs(path)
@@ -648,7 +736,9 @@ class ComposePaths(object):
            compose/metadata
            compose/metadata/rpms.json
        """
        path = os.path.join(
            self.topdir(create_dir=create_dir, relative=relative), "metadata"
        )
        if create_dir and not relative:
            makedirs(path)
        if file_name:
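The paths_module hook shown in the Paths.__init__ hunk above imports a user-named module and pulls LogPaths, WorkPaths and ComposePaths out of it. A skeletal, hypothetical module satisfying that contract (here simply reusing the stock implementations) could be:

# mypaths.py -- hypothetical module enabled via compose.conf["paths_module"] = "mypaths".
# Paths.__init__ only needs these three class names to exist; subclassing the
# defaults keeps their behaviour unchanged.
from pungi import paths


class LogPaths(paths.LogPaths):
    pass


class WorkPaths(paths.WorkPaths):
    pass


class ComposePaths(paths.ComposePaths):
    pass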
@@ -19,7 +19,6 @@ from pungi import util


class PhaseBase(object):
    def __init__(self, compose):
        self.compose = compose
        self.msg = "---------- PHASE: %s ----------" % self.name.upper()
@@ -60,7 +59,7 @@ class PhaseBase(object):
            self.finished = True
            return
        self.compose.log_info("[BEGIN] %s" % self.msg)
        self.compose.notifier.send("phase-start", phase_name=self.name)
        self.run()

    def get_config_block(self, variant, arch=None):
@@ -70,11 +69,13 @@ class PhaseBase(object):
        """
        self.used_patterns = self.used_patterns or set()
        if arch is not None:
            return util.get_arch_variant_data(
                self.compose.conf, self.name, arch, variant, keys=self.used_patterns
            )
        else:
            return util.get_variant_data(
                self.compose.conf, self.name, variant, keys=self.used_patterns
            )

    def get_all_patterns(self):
        """Get all variant patterns from config file for this phase."""
@@ -93,10 +94,12 @@ class PhaseBase(object):
        unused_patterns = all_patterns - self.used_patterns
        if unused_patterns:
            self.compose.log_warning(
                "[%s] Patterns in config do not match any variant: %s"
                % (self.name.upper(), ", ".join(sorted(unused_patterns)))
            )
            self.compose.log_info(
                "Note that variants can be excluded in configuration file"
            )

    def stop(self):
        if self.finished:
@@ -108,7 +111,7 @@ class PhaseBase(object):
        if self.used_patterns is not None:
            # We only want to report this if the config was actually queried.
            self.report_unused_patterns()
        self.compose.notifier.send("phase-stop", phase_name=self.name)

    def run(self):
        raise NotImplementedError
@@ -121,7 +124,9 @@ class ConfigGuardedPhase(PhaseBase):
        if super(ConfigGuardedPhase, self).skip():
            return True
        if not self.compose.conf.get(self.name):
            self.compose.log_info(
                "Config section '%s' was not found. Skipping." % self.name
            )
            return True
        return False

@@ -140,9 +145,11 @@ class ImageConfigMixin(object):

    def get_config(self, cfg, opt):
        return cfg.get(
            opt,
            self.compose.conf.get(
                "%s_%s" % (self.name, opt), self.compose.conf.get("global_%s" % opt)
            ),
        )

    def get_version(self, cfg):
        """
@@ -161,11 +168,16 @@ class ImageConfigMixin(object):
        deprecated), replace it with a generated value. Uses configuration
        passed as argument, phase specific settings and global settings.
        """
        for key, conf in [
            ("release", cfg),
            ("%s_release" % self.name, self.compose.conf),
            ("global_release", self.compose.conf),
        ]:
            if key in conf:
                return (
                    util.version_generator(self.compose, conf[key])
                    or self.compose.image_release
                )
        return None

    def get_ksurl(self, cfg):
@@ -185,6 +197,7 @@ class PhaseLoggerMixin(object):
    A mixin that can extend a phase with a new logging logger that copy
    handlers from compose, but with different formatter that includes phase name.
    """

    def __init__(self, *args, **kwargs):
        super(PhaseLoggerMixin, self).__init__(*args, **kwargs)
        self.logger = None
@@ -193,6 +206,7 @@ class PhaseLoggerMixin(object):
        self.logger.setLevel(logging.DEBUG)
        format = "%(asctime)s [%(name)-16s] [%(levelname)-8s] %(message)s"
        import copy

        for handler in self.compose._logger.handlers:
            hl = copy.copy(handler)
            hl.setFormatter(logging.Formatter(format, datefmt="%Y-%m-%d %H:%M:%S"))
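ImageConfigMixin.get_config above resolves an option from the per-image block first, then the phase-scoped setting, then the global one. A small, self-contained illustration of that lookup order with invented values, assuming a phase named "live_images":

# Hypothetical data; mirrors cfg.get(opt, conf.get("<phase>_<opt>", conf.get("global_<opt>"))).
cfg = {"ksurl": "git://example.com/ks.git"}
compose_conf = {
    "live_images_ksurl": "git://example.com/phase-ks.git",
    "global_ksurl": "git://example.com/global-ks.git",
}

value = cfg.get(
    "ksurl",
    compose_conf.get("live_images_ksurl", compose_conf.get("global_ksurl")),
)
# value == "git://example.com/ks.git"; remove the key from cfg and the lookup
# falls back to the phase-scoped setting, then to the global one.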
@ -47,7 +47,7 @@ class BuildinstallPhase(PhaseBase):
|
|||||||
self.pool.finished_tasks = set()
|
self.pool.finished_tasks = set()
|
||||||
self.buildinstall_method = self.compose.conf.get("buildinstall_method")
|
self.buildinstall_method = self.compose.conf.get("buildinstall_method")
|
||||||
self.lorax_use_koji_plugin = self.compose.conf.get("lorax_use_koji_plugin")
|
self.lorax_use_koji_plugin = self.compose.conf.get("lorax_use_koji_plugin")
|
||||||
self.used_lorax = self.buildinstall_method == 'lorax'
|
self.used_lorax = self.buildinstall_method == "lorax"
|
||||||
self.pkgset_phase = pkgset_phase
|
self.pkgset_phase = pkgset_phase
|
||||||
|
|
||||||
self.warned_skipped = False
|
self.warned_skipped = False
|
||||||
@ -63,7 +63,16 @@ class BuildinstallPhase(PhaseBase):
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _get_lorax_cmd(self, repo_baseurl, output_dir, variant, arch, buildarch, volid, final_output_dir):
|
def _get_lorax_cmd(
|
||||||
|
self,
|
||||||
|
repo_baseurl,
|
||||||
|
output_dir,
|
||||||
|
variant,
|
||||||
|
arch,
|
||||||
|
buildarch,
|
||||||
|
volid,
|
||||||
|
final_output_dir,
|
||||||
|
):
|
||||||
noupgrade = True
|
noupgrade = True
|
||||||
bugurl = None
|
bugurl = None
|
||||||
nomacboot = True
|
nomacboot = True
|
||||||
@ -76,19 +85,21 @@ class BuildinstallPhase(PhaseBase):
|
|||||||
version = self.compose.conf.get(
|
version = self.compose.conf.get(
|
||||||
"treeinfo_version", self.compose.conf["release_version"]
|
"treeinfo_version", self.compose.conf["release_version"]
|
||||||
)
|
)
|
||||||
for data in get_arch_variant_data(self.compose.conf, 'lorax_options', arch, variant):
|
for data in get_arch_variant_data(
|
||||||
if not data.get('noupgrade', True):
|
self.compose.conf, "lorax_options", arch, variant
|
||||||
|
):
|
||||||
|
if not data.get("noupgrade", True):
|
||||||
noupgrade = False
|
noupgrade = False
|
||||||
if data.get('bugurl'):
|
if data.get("bugurl"):
|
||||||
bugurl = data.get('bugurl')
|
bugurl = data.get("bugurl")
|
||||||
if not data.get('nomacboot', True):
|
if not data.get("nomacboot", True):
|
||||||
nomacboot = False
|
nomacboot = False
|
||||||
if "rootfs_size" in data:
|
if "rootfs_size" in data:
|
||||||
rootfs_size = data.get("rootfs_size")
|
rootfs_size = data.get("rootfs_size")
|
||||||
add_template.extend(data.get('add_template', []))
|
add_template.extend(data.get("add_template", []))
|
||||||
add_arch_template.extend(data.get('add_arch_template', []))
|
add_arch_template.extend(data.get("add_arch_template", []))
|
||||||
add_template_var.extend(data.get('add_template_var', []))
|
add_template_var.extend(data.get("add_template_var", []))
|
||||||
add_arch_template_var.extend(data.get('add_arch_template_var', []))
|
add_arch_template_var.extend(data.get("add_arch_template_var", []))
|
||||||
dracut_args.extend(data.get("dracut_args", []))
|
dracut_args.extend(data.get("dracut_args", []))
|
||||||
if "version" in data:
|
if "version" in data:
|
||||||
version = data["version"]
|
version = data["version"]
|
||||||
@ -101,7 +112,9 @@ class BuildinstallPhase(PhaseBase):
|
|||||||
|
|
||||||
repos = repo_baseurl[:]
|
repos = repo_baseurl[:]
|
||||||
repos.extend(
|
repos.extend(
|
||||||
get_arch_variant_data(self.compose.conf, "lorax_extra_sources", arch, variant)
|
get_arch_variant_data(
|
||||||
|
self.compose.conf, "lorax_extra_sources", arch, variant
|
||||||
|
)
|
||||||
)
|
)
|
||||||
if self.compose.has_comps:
|
if self.compose.has_comps:
|
||||||
comps_repo = self.compose.paths.work.comps_repo(arch, variant)
|
comps_repo = self.compose.paths.work.comps_repo(arch, variant)
|
||||||
@ -162,8 +175,10 @@ class BuildinstallPhase(PhaseBase):
|
|||||||
log_dir=log_dir,
|
log_dir=log_dir,
|
||||||
dracut_args=dracut_args,
|
dracut_args=dracut_args,
|
||||||
)
|
)
|
||||||
return 'rm -rf %s && %s' % (shlex_quote(output_topdir),
|
return "rm -rf %s && %s" % (
|
||||||
' '.join([shlex_quote(x) for x in lorax_cmd]))
|
shlex_quote(output_topdir),
|
||||||
|
" ".join([shlex_quote(x) for x in lorax_cmd]),
|
||||||
|
)
|
||||||
|
|
||||||
def get_repos(self, arch):
|
def get_repos(self, arch):
|
||||||
repos = []
|
repos = []
|
||||||
@ -176,7 +191,7 @@ class BuildinstallPhase(PhaseBase):
|
|||||||
product = self.compose.conf["release_name"]
|
product = self.compose.conf["release_name"]
|
||||||
version = self.compose.conf["release_version"]
|
version = self.compose.conf["release_version"]
|
||||||
release = self.compose.conf["release_version"]
|
release = self.compose.conf["release_version"]
|
||||||
disc_type = self.compose.conf['disc_types'].get('dvd', 'dvd')
|
disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")
|
||||||
|
|
||||||
# Prepare kickstart file for final images.
|
# Prepare kickstart file for final images.
|
||||||
self.pool.kickstart_file = get_kickstart_file(self.compose)
|
self.pool.kickstart_file = get_kickstart_file(self.compose)
|
||||||
@ -184,8 +199,12 @@ class BuildinstallPhase(PhaseBase):
|
|||||||
for arch in self.compose.get_arches():
|
for arch in self.compose.get_arches():
|
||||||
commands = []
|
commands = []
|
||||||
|
|
||||||
output_dir = self.compose.paths.work.buildinstall_dir(arch, allow_topdir_override=True)
|
output_dir = self.compose.paths.work.buildinstall_dir(
|
||||||
final_output_dir = self.compose.paths.work.buildinstall_dir(arch, allow_topdir_override=False)
|
arch, allow_topdir_override=True
|
||||||
|
)
|
||||||
|
final_output_dir = self.compose.paths.work.buildinstall_dir(
|
||||||
|
arch, allow_topdir_override=False
|
||||||
|
)
|
||||||
makedirs(final_output_dir)
|
makedirs(final_output_dir)
|
||||||
repo_baseurls = self.get_repos(arch)
|
repo_baseurls = self.get_repos(arch)
|
||||||
if final_output_dir != output_dir:
|
if final_output_dir != output_dir:
|
||||||
@@ -194,40 +213,58 @@ class BuildinstallPhase(PhaseBase):
 if self.buildinstall_method == "lorax":

 buildarch = get_valid_arches(arch)[0]
-for variant in self.compose.get_variants(arch=arch, types=['variant']):
+for variant in self.compose.get_variants(arch=arch, types=["variant"]):
 if variant.is_empty:
 continue

-skip = get_arch_variant_data(self.compose.conf, "buildinstall_skip", arch, variant)
+skip = get_arch_variant_data(
+self.compose.conf, "buildinstall_skip", arch, variant
+)
 if skip == [True]:
 self.compose.log_info(
-'Skipping buildinstall for %s.%s due to config option' % (variant, arch))
+"Skipping buildinstall for %s.%s due to config option"
+% (variant, arch)
+)
 continue

-volid = get_volid(self.compose, arch, variant=variant, disc_type=disc_type)
+volid = get_volid(
+self.compose, arch, variant=variant, disc_type=disc_type
+)
 commands.append(
 (
 variant,
 self._get_lorax_cmd(
-repo_baseurls, output_dir, variant, arch, buildarch, volid, final_output_dir
+repo_baseurls,
+output_dir,
+variant,
+arch,
+buildarch,
+volid,
+final_output_dir,
 ),
 )
 )
 elif self.buildinstall_method == "buildinstall":
 volid = get_volid(self.compose, arch, disc_type=disc_type)
 commands.append(
-(None,
-lorax.get_buildinstall_cmd(product,
+(
+None,
+lorax.get_buildinstall_cmd(
+product,
 version,
 release,
 repo_baseurls,
 output_dir,
 is_final=self.compose.supported,
 buildarch=arch,
-volid=volid))
+volid=volid,
+),
+)
 )
 else:
-raise ValueError("Unsupported buildinstall method: %s" % self.buildinstall_method)
+raise ValueError(
+"Unsupported buildinstall method: %s" % self.buildinstall_method
+)

 for (variant, cmd) in commands:
 self.pool.add(BuildinstallThread(self.pool))
@@ -239,8 +276,11 @@ class BuildinstallPhase(PhaseBase):
 # If the phase is skipped, we can treat it as successful. Either there
 # will be no output, or it's a debug run of compose where anything can
 # happen.
-return (super(BuildinstallPhase, self).skip()
-or (variant.uid if self.used_lorax else None, arch) in self.pool.finished_tasks)
+return (
+super(BuildinstallPhase, self).skip()
+or (variant.uid if self.used_lorax else None, arch)
+in self.pool.finished_tasks
+)


 def get_kickstart_file(compose):
@@ -296,7 +336,7 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
 os.unlink(config_path) # break hadlink by removing file writing a new one

 # double-escape volid in yaboot.conf
-new_volid = volid_escaped_2 if 'yaboot' in config else volid_escaped
+new_volid = volid_escaped_2 if "yaboot" in config else volid_escaped

 ks = (" ks=hd:LABEL=%s:/ks.cfg" % new_volid) if ks_file else ""

@@ -310,7 +350,7 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
 f.write(data)

 if logger and data != original_data:
-logger.info('Boot config %s changed' % config_path)
+logger.info("Boot config %s changed" % config_path)

 return found_configs

@@ -319,7 +359,9 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
 # * it's quite trivial to replace volids
 # * it's not easy to replace menu titles
 # * we probably need to get this into lorax
-def tweak_buildinstall(compose, src, dst, arch, variant, label, volid, kickstart_file=None):
+def tweak_buildinstall(
+compose, src, dst, arch, variant, label, volid, kickstart_file=None
+):
 tmp_dir = compose.mkdtemp(prefix="tweak_buildinstall_")

 # verify src
@@ -336,11 +378,14 @@ def tweak_buildinstall(compose, src, dst, arch, variant, label, volid, kickstart
 # copy src to temp
 # TODO: place temp on the same device as buildinstall dir so we can hardlink
 cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
-shlex_quote(src), shlex_quote(tmp_dir)
+shlex_quote(src),
+shlex_quote(tmp_dir),
 )
 run(cmd)

-found_configs = tweak_configs(tmp_dir, volid, kickstart_file, logger=compose._logger)
+found_configs = tweak_configs(
+tmp_dir, volid, kickstart_file, logger=compose._logger
+)
 if kickstart_file and found_configs:
 shutil.copy2(kickstart_file, os.path.join(dst, "ks.cfg"))

@@ -351,15 +396,23 @@ def tweak_buildinstall(compose, src, dst, arch, variant, label, volid, kickstart
 if not os.path.isfile(image):
 continue

-with iso.mount(image, logger=compose._logger,
-use_guestmount=compose.conf.get("buildinstall_use_guestmount")
+with iso.mount(
+image,
+logger=compose._logger,
+use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
 ) as mount_tmp_dir:
 for config in BOOT_CONFIGS:
 config_path = os.path.join(tmp_dir, config)
 config_in_image = os.path.join(mount_tmp_dir, config)

 if os.path.isfile(config_in_image):
-cmd = ["cp", "-v", "--remove-destination", config_path, config_in_image]
+cmd = [
+"cp",
+"-v",
+"--remove-destination",
+config_path,
+config_in_image,
+]
 run(cmd)

 # HACK: make buildinstall files world readable
@@ -367,7 +420,8 @@ def tweak_buildinstall(compose, src, dst, arch, variant, label, volid, kickstart

 # copy temp to dst
 cmd = "cp -dRv --preserve=mode,links,timestamps --remove-destination %s/* %s/" % (
-shlex_quote(tmp_dir), shlex_quote(dst)
+shlex_quote(tmp_dir),
+shlex_quote(dst),
 )
 run(cmd)

@@ -378,7 +432,7 @@ def link_boot_iso(compose, arch, variant, can_fail):
 if arch == "src":
 return

-disc_type = compose.conf['disc_types'].get('boot', 'boot')
+disc_type = compose.conf["disc_types"].get("boot", "boot")

 symlink_isos_to = compose.conf.get("symlink_isos_to")
 os_tree = compose.paths.compose.os_tree(arch, variant)
@@ -388,14 +442,15 @@ def link_boot_iso(compose, arch, variant, can_fail):
 return

 msg = "Linking boot.iso (arch: %s, variant: %s)" % (arch, variant)
-filename = compose.get_image_name(arch, variant, disc_type=disc_type,
-disc_num=None, suffix=".iso")
-new_boot_iso_path = compose.paths.compose.iso_path(arch, variant, filename,
-symlink_to=symlink_isos_to)
-new_boot_iso_relative_path = compose.paths.compose.iso_path(arch,
-variant,
-filename,
-relative=True)
+filename = compose.get_image_name(
+arch, variant, disc_type=disc_type, disc_num=None, suffix=".iso"
+)
+new_boot_iso_path = compose.paths.compose.iso_path(
+arch, variant, filename, symlink_to=symlink_isos_to
+)
+new_boot_iso_relative_path = compose.paths.compose.iso_path(
+arch, variant, filename, relative=True
+)
 if os.path.exists(new_boot_iso_path):
 # TODO: log
 compose.log_warning("[SKIP ] %s" % msg)
@@ -427,8 +482,8 @@ def link_boot_iso(compose, arch, variant, can_fail):
 img.bootable = True
 img.subvariant = variant.uid
 img.implant_md5 = implant_md5
-setattr(img, 'can_fail', can_fail)
-setattr(img, 'deliverable', 'buildinstall')
+setattr(img, "can_fail", can_fail)
+setattr(img, "deliverable", "buildinstall")
 try:
 img.volume_id = iso.get_volume_id(new_boot_iso_path)
 except RuntimeError:
@@ -441,28 +496,33 @@ class BuildinstallThread(WorkerThread):
 def process(self, item, num):
 # The variant is None unless lorax is used as buildinstall method.
 compose, arch, variant, cmd = item
-can_fail = compose.can_fail(variant, arch, 'buildinstall')
-with failable(compose, can_fail, variant, arch, 'buildinstall'):
+can_fail = compose.can_fail(variant, arch, "buildinstall")
+with failable(compose, can_fail, variant, arch, "buildinstall"):
 self.worker(compose, arch, variant, cmd, num)

 def worker(self, compose, arch, variant, cmd, num):
 buildinstall_method = compose.conf["buildinstall_method"]
 lorax_use_koji_plugin = compose.conf["lorax_use_koji_plugin"]
-log_filename = ('buildinstall-%s' % variant.uid) if variant else 'buildinstall'
+log_filename = ("buildinstall-%s" % variant.uid) if variant else "buildinstall"
 log_file = compose.paths.log.log_file(arch, log_filename)

 msg = "Running buildinstall for arch %s, variant %s" % (arch, variant)

 output_dir = compose.paths.work.buildinstall_dir(
-arch, allow_topdir_override=True, variant=variant)
-final_output_dir = compose.paths.work.buildinstall_dir(
-arch, variant=variant)
+arch, allow_topdir_override=True, variant=variant
+)
+final_output_dir = compose.paths.work.buildinstall_dir(arch, variant=variant)

-if (os.path.isdir(output_dir) and os.listdir(output_dir) or
-os.path.isdir(final_output_dir) and os.listdir(final_output_dir)):
+if (
+os.path.isdir(output_dir)
+and os.listdir(output_dir)
+or os.path.isdir(final_output_dir)
+and os.listdir(final_output_dir)
+):
 # output dir is *not* empty -> SKIP
 self.pool.log_warning(
-'[SKIP ] Buildinstall for arch %s, variant %s' % (arch, variant))
+"[SKIP ] Buildinstall for arch %s, variant %s" % (arch, variant)
+)
 return

 self.pool.log_info("[BEGIN] %s" % msg)
@@ -485,15 +545,21 @@ class BuildinstallThread(WorkerThread):
 runroot = Runroot(compose, phase="buildinstall")
 if buildinstall_method == "lorax" and lorax_use_koji_plugin:
 runroot.run_pungi_buildinstall(
-cmd, log_file=log_file, arch=arch, packages=packages,
+cmd,
+log_file=log_file,
+arch=arch,
+packages=packages,
 mounts=[compose.topdir],
-weight=compose.conf['runroot_weights'].get('buildinstall'),
+weight=compose.conf["runroot_weights"].get("buildinstall"),
 )
 else:
 runroot.run(
-cmd, log_file=log_file, arch=arch, packages=packages,
+cmd,
+log_file=log_file,
+arch=arch,
+packages=packages,
 mounts=[compose.topdir],
-weight=compose.conf['runroot_weights'].get('buildinstall'),
+weight=compose.conf["runroot_weights"].get("buildinstall"),
 chown_paths=chown_paths,
 )

@@ -504,14 +570,14 @@ class BuildinstallThread(WorkerThread):
 copy_all(results_dir, final_output_dir)

 # Get the log_dir into which we should copy the resulting log files.
-log_fname = 'buildinstall-%s-logs/dummy' % variant.uid
+log_fname = "buildinstall-%s-logs/dummy" % variant.uid
 final_log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_fname))
 if not os.path.exists(final_log_dir):
 makedirs(final_log_dir)
 log_dir = os.path.join(output_dir, "logs")
 copy_all(log_dir, final_log_dir)

-log_file = compose.paths.log.log_file(arch, log_filename + '-RPMs')
+log_file = compose.paths.log.log_file(arch, log_filename + "-RPMs")
 rpms = runroot.get_buildroot_rpms()
 with open(log_file, "w") as f:
 f.write("\n".join(rpms))
@@ -523,7 +589,7 @@ class BuildinstallThread(WorkerThread):
 self.pool.log_info("[DONE ] %s" % msg)

 def copy_files(self, compose, variant, arch):
-disc_type = compose.conf['disc_types'].get('dvd', 'dvd')
+disc_type = compose.conf["disc_types"].get("dvd", "dvd")

 buildinstall_dir = compose.paths.work.buildinstall_dir(arch)

@@ -533,13 +599,17 @@ class BuildinstallThread(WorkerThread):
 buildinstall_dir = os.path.join(buildinstall_dir, variant.uid)

 # Find all relevant variants if lorax is not used.
-variants = [variant] if variant else compose.get_variants(arch=arch, types=["self", "variant"])
+variants = (
+[variant]
+if variant
+else compose.get_variants(arch=arch, types=["self", "variant"])
+)
 for var in variants:
 os_tree = compose.paths.compose.os_tree(arch, var)
 # TODO: label is not used
 label = ""
 volid = get_volid(compose, arch, var, disc_type=disc_type)
-can_fail = compose.can_fail(var, arch, 'buildinstall')
+can_fail = compose.can_fail(var, arch, "buildinstall")
 tweak_buildinstall(
 compose,
 buildinstall_dir,
@@ -565,7 +635,7 @@ def _get_log_dir(compose, variant, arch):

 # The paths module will modify the filename (by inserting arch). But we
 # only care about the directory anyway.
-log_filename = 'buildinstall-%s-logs/dummy' % variant.uid
+log_filename = "buildinstall-%s-logs/dummy" % variant.uid
 log_dir = os.path.dirname(compose.paths.log.log_file(arch, log_filename))
 makedirs(log_dir)
 return log_dir

@@ -29,8 +29,14 @@ from pungi.wrappers import iso
 from pungi.wrappers.createrepo import CreaterepoWrapper
 from pungi.wrappers import kojiwrapper
 from pungi.phases.base import PhaseBase, PhaseLoggerMixin
-from pungi.util import (makedirs, get_volid, get_arch_variant_data, failable,
-get_file_size, get_mtime)
+from pungi.util import (
+makedirs,
+get_volid,
+get_arch_variant_data,
+failable,
+get_file_size,
+get_mtime,
+)
 from pungi.media_split import MediaSplitter, convert_media_size
 from pungi.compose_metadata.discinfo import read_discinfo, write_discinfo
 from pungi.runroot import Runroot
@@ -59,31 +65,42 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
 return False
 if variant.type != "variant":
 return False
-skip = get_arch_variant_data(self.compose.conf, "buildinstall_skip", arch, variant)
+skip = get_arch_variant_data(
+self.compose.conf, "buildinstall_skip", arch, variant
+)
 if skip == [True]:
 # Buildinstall is skipped for this tree. Can't create a bootable ISO.
 return False
-return bool(self.compose.conf.get('buildinstall_method', ''))
+return bool(self.compose.conf.get("buildinstall_method", ""))

 def run(self):
 symlink_isos_to = self.compose.conf.get("symlink_isos_to")
-disc_type = self.compose.conf['disc_types'].get('dvd', 'dvd')
+disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")
 deliverables = []

 commands = []
-for variant in self.compose.get_variants(types=["variant", "layered-product", "optional"]):
+for variant in self.compose.get_variants(
+types=["variant", "layered-product", "optional"]
+):
 if variant.is_empty:
 continue
 for arch in variant.arches + ["src"]:
-skip_iso = get_arch_variant_data(self.compose.conf, "createiso_skip", arch, variant)
+skip_iso = get_arch_variant_data(
+self.compose.conf, "createiso_skip", arch, variant
+)
 if skip_iso == [True]:
-self.logger.info("Skipping createiso for %s.%s due to config option" % (variant, arch))
+self.logger.info(
+"Skipping createiso for %s.%s due to config option"
+% (variant, arch)
+)
 continue

 volid = get_volid(self.compose, arch, variant, disc_type=disc_type)
 os_tree = self.compose.paths.compose.os_tree(arch, variant)

-iso_dir = self.compose.paths.compose.iso_dir(arch, variant, symlink_to=symlink_isos_to)
+iso_dir = self.compose.paths.compose.iso_dir(
+arch, variant, symlink_to=symlink_isos_to
+)
 if not iso_dir:
 continue

@@ -97,21 +114,25 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):

 if bootable and not self.bi.succeeded(variant, arch):
 self.logger.warning(
-'ISO should be bootable, but buildinstall failed. Skipping for %s.%s'
-% (variant, arch))
+"ISO should be bootable, but buildinstall failed. Skipping for %s.%s"
+% (variant, arch)
+)
 continue

-split_iso_data = split_iso(self.compose, arch, variant, no_split=bootable,
-logger=self.logger)
+split_iso_data = split_iso(
+self.compose, arch, variant, no_split=bootable, logger=self.logger
+)
 disc_count = len(split_iso_data)

 for disc_num, iso_data in enumerate(split_iso_data):
 disc_num += 1

 filename = self.compose.get_image_name(
-arch, variant, disc_type=disc_type, disc_num=disc_num)
+arch, variant, disc_type=disc_type, disc_num=disc_num
+)
 iso_path = self.compose.paths.compose.iso_path(
-arch, variant, filename, symlink_to=symlink_isos_to)
+arch, variant, filename, symlink_to=symlink_isos_to
+)
 if os.path.isfile(iso_path):
 self.logger.warning(
 "Skipping mkisofs, image already exists: %s", iso_path
@@ -119,9 +140,14 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
 continue
 deliverables.append(iso_path)

-graft_points = prepare_iso(self.compose, arch, variant,
-disc_num=disc_num, disc_count=disc_count,
-split_iso_data=iso_data)
+graft_points = prepare_iso(
+self.compose,
+arch,
+variant,
+disc_num=disc_num,
+disc_count=disc_count,
+split_iso_data=iso_data,
+)

 cmd = {
 "iso_path": iso_path,
@@ -133,8 +159,9 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
 }

 if os.path.islink(iso_dir):
-cmd["mount"] = os.path.abspath(os.path.join(os.path.dirname(iso_dir),
-os.readlink(iso_dir)))
+cmd["mount"] = os.path.abspath(
+os.path.join(os.path.dirname(iso_dir), os.readlink(iso_dir))
+)

 opts = createiso.CreateIsoOpts(
 output_dir=iso_dir,
@@ -147,21 +174,25 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
 )

 if bootable:
-opts = opts._replace(buildinstall_method=self.compose.conf['buildinstall_method'])
+opts = opts._replace(
+buildinstall_method=self.compose.conf["buildinstall_method"]
+)

-if self.compose.conf['create_jigdo']:
+if self.compose.conf["create_jigdo"]:
 jigdo_dir = self.compose.paths.compose.jigdo_dir(arch, variant)
 opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)

-script_file = os.path.join(self.compose.paths.work.tmp_dir(arch, variant),
-'createiso-%s.sh' % filename)
-with open(script_file, 'w') as f:
+script_file = os.path.join(
+self.compose.paths.work.tmp_dir(arch, variant),
+"createiso-%s.sh" % filename,
+)
+with open(script_file, "w") as f:
 createiso.write_script(opts, f)
-cmd['cmd'] = ['bash', script_file]
+cmd["cmd"] = ["bash", script_file]
 commands.append((cmd, variant, arch))

 if self.compose.notifier:
-self.compose.notifier.send('createiso-targets', deliverables=deliverables)
+self.compose.notifier.send("createiso-targets", deliverables=deliverables)

 for (cmd, variant, arch) in commands:
 self.pool.add(CreateIsoThread(self.pool))
@@ -180,15 +211,19 @@ class CreateIsoThread(WorkerThread):
 except OSError:
 pass
 if compose.notifier:
-compose.notifier.send('createiso-imagefail',
-file=cmd['iso_path'],
+compose.notifier.send(
+"createiso-imagefail",
+file=cmd["iso_path"],
 arch=arch,
-variant=str(variant))
+variant=str(variant),
+)

 def process(self, item, num):
 compose, cmd, variant, arch = item
-can_fail = compose.can_fail(variant, arch, 'iso')
-with failable(compose, can_fail, variant, arch, 'iso', logger=self.pool._logger):
+can_fail = compose.can_fail(variant, arch, "iso")
+with failable(
+compose, can_fail, variant, arch, "iso", logger=self.pool._logger
+):
 self.worker(compose, cmd, variant, arch, num)

 def worker(self, compose, cmd, variant, arch, num):
@@ -196,23 +231,35 @@ class CreateIsoThread(WorkerThread):
 if "mount" in cmd:
 mounts.append(cmd["mount"])

-bootable = cmd['bootable']
+bootable = cmd["bootable"]
 log_file = compose.paths.log.log_file(
-arch, "createiso-%s" % os.path.basename(cmd["iso_path"]))
+arch, "createiso-%s" % os.path.basename(cmd["iso_path"])
+)

 msg = "Creating ISO (arch: %s, variant: %s): %s" % (
-arch, variant, os.path.basename(cmd["iso_path"]))
+arch,
+variant,
+os.path.basename(cmd["iso_path"]),
+)
 self.pool.log_info("[BEGIN] %s" % msg)

 try:
-run_createiso_command(num, compose, bootable, arch,
-cmd['cmd'], mounts, log_file)
+run_createiso_command(
+num, compose, bootable, arch, cmd["cmd"], mounts, log_file
+)
 except Exception:
 self.fail(compose, cmd, variant, arch)
 raise

-add_iso_to_metadata(compose, variant, arch, cmd["iso_path"],
-cmd["bootable"], cmd["disc_num"], cmd["disc_count"])
+add_iso_to_metadata(
+compose,
+variant,
+arch,
+cmd["iso_path"],
+cmd["bootable"],
+cmd["disc_num"],
+cmd["disc_count"],
+)

 # Delete staging directory if present.
 staging_dir = compose.paths.work.iso_staging_dir(
@@ -223,10 +270,12 @@ class CreateIsoThread(WorkerThread):

 self.pool.log_info("[DONE ] %s" % msg)
 if compose.notifier:
-compose.notifier.send('createiso-imagedone',
-file=cmd['iso_path'],
+compose.notifier.send(
+"createiso-imagedone",
+file=cmd["iso_path"],
 arch=arch,
-variant=str(variant))
+variant=str(variant),
+)


 def add_iso_to_metadata(
@@ -240,7 +289,7 @@ def add_iso_to_metadata(
 additional_variants=None,
 ):
 img = Image(compose.im)
-img.path = iso_path.replace(compose.paths.compose.topdir(), '').lstrip('/')
+img.path = iso_path.replace(compose.paths.compose.topdir(), "").lstrip("/")
 img.mtime = get_mtime(iso_path)
 img.size = get_file_size(iso_path)
 img.arch = arch
@@ -255,8 +304,8 @@ def add_iso_to_metadata(
 if additional_variants:
 img.unified = True
 img.additional_variants = additional_variants
-setattr(img, 'can_fail', compose.can_fail(variant, arch, 'iso'))
-setattr(img, 'deliverable', 'iso')
+setattr(img, "can_fail", compose.can_fail(variant, arch, "iso"))
+setattr(img, "deliverable", "iso")
 try:
 img.volume_id = iso.get_volume_id(iso_path)
 except RuntimeError:
@@ -269,15 +318,16 @@ def add_iso_to_metadata(
 return img


-def run_createiso_command(num, compose, bootable, arch, cmd, mounts,
-log_file, with_jigdo=True):
+def run_createiso_command(
+num, compose, bootable, arch, cmd, mounts, log_file, with_jigdo=True
+):
 packages = ["coreutils", "genisoimage", "isomd5sum"]
-if with_jigdo and compose.conf['create_jigdo']:
-packages.append('jigdo')
+if with_jigdo and compose.conf["create_jigdo"]:
+packages.append("jigdo")
 if bootable:
 extra_packages = {
-'lorax': ['lorax', 'which'],
-'buildinstall': ['anaconda'],
+"lorax": ["lorax", "which"],
+"buildinstall": ["anaconda"],
 }
 packages.extend(extra_packages[compose.conf["buildinstall_method"]])

@@ -301,8 +351,13 @@ def run_createiso_command(num, compose, bootable, arch, cmd, mounts,
 build_arch = random.choice(tag_arches)

 runroot.run(
-cmd, log_file=log_file, arch=build_arch, packages=packages, mounts=mounts,
-weight=compose.conf['runroot_weights'].get('createiso'))
+cmd,
+log_file=log_file,
+arch=build_arch,
+packages=packages,
+mounts=mounts,
+weight=compose.conf["runroot_weights"].get("createiso"),
+)


 def split_iso(compose, arch, variant, no_split=False, logger=None):
@@ -318,8 +373,8 @@ def split_iso(compose, arch, variant, no_split=False, logger=None):
 """
 if not logger:
 logger = compose._logger
-media_size = compose.conf['iso_size']
-media_reserve = compose.conf['split_iso_reserve']
+media_size = compose.conf["iso_size"]
+media_reserve = compose.conf["split_iso_reserve"]
 split_size = convert_media_size(media_size) - convert_media_size(media_reserve)
 real_size = None if no_split else split_size

@@ -351,7 +406,9 @@ def split_iso(compose, arch, variant, no_split=False, logger=None):
 for root, dirs, files in os.walk(os_tree):
 for dn in dirs[:]:
 repo_dir = os.path.join(root, dn)
-if repo_dir == os.path.join(compose.paths.compose.repository(arch, variant), "repodata"):
+if repo_dir == os.path.join(
+compose.paths.compose.repository(arch, variant), "repodata"
+):
 dirs.remove(dn)

 for fn in files:
@@ -369,17 +426,19 @@ def split_iso(compose, arch, variant, no_split=False, logger=None):
 for path, size, sticky in all_files + packages:
 ms.add_file(path, size, sticky)

-logger.debug('Splitting media for %s.%s:' % (variant.uid, arch))
+logger.debug("Splitting media for %s.%s:" % (variant.uid, arch))
 result = ms.split()
-if no_split and result[0]['size'] > split_size:
+if no_split and result[0]["size"] > split_size:
 logger.warning(
 "ISO for %s.%s does not fit on single media! It is %s bytes too big. (Total size: %s B)"
-% (variant.uid, arch, result[0]['size'] - split_size, result[0]['size'])
+% (variant.uid, arch, result[0]["size"] - split_size, result[0]["size"])
 )
 return result


-def prepare_iso(compose, arch, variant, disc_num=1, disc_count=None, split_iso_data=None):
+def prepare_iso(
+compose, arch, variant, disc_num=1, disc_count=None, split_iso_data=None
+):
 tree_dir = compose.paths.compose.os_tree(arch, variant)
 filename = compose.get_image_name(arch, variant, disc_num=disc_num)
 iso_dir = compose.paths.work.iso_dir(arch, filename)
@@ -428,7 +487,9 @@ def prepare_iso(compose, arch, variant, disc_num=1, disc_count=None, split_iso_d
 )
 run(cmd)
 # add repodata/repomd.xml back to checksums
-ti.checksums.add("repodata/repomd.xml", createrepo_checksum, root_dir=iso_dir)
+ti.checksums.add(
+"repodata/repomd.xml", createrepo_checksum, root_dir=iso_dir
+)

 new_ti_path = os.path.join(iso_dir, ".treeinfo")
 ti.dump(new_ti_path)
@@ -443,7 +504,9 @@ def prepare_iso(compose, arch, variant, disc_num=1, disc_count=None, split_iso_d
 if not disc_count or disc_count == 1:
 data = iso.get_graft_points(compose, [tree_dir, iso_dir])
 else:
-data = iso.get_graft_points(compose, [iso._paths_from_list(tree_dir, split_iso_data["files"]), iso_dir])
+data = iso.get_graft_points(
+compose, [iso._paths_from_list(tree_dir, split_iso_data["files"]), iso_dir]
+)

 if compose.conf["createiso_break_hardlinks"]:
 compose.log_debug(
@@ -458,7 +521,9 @@ def prepare_iso(compose, arch, variant, disc_num=1, disc_count=None, split_iso_d
 )
 create_hardlinks(
 compose.paths.work.iso_staging_dir(arch, variant, filename),
-log_file=compose.paths.log.log_file(arch, "iso-hardlink-%s.log" % variant.uid),
+log_file=compose.paths.log.log_file(
+arch, "iso-hardlink-%s.log" % variant.uid
+),
 )

 # TODO: /content /graft-points

@@ -14,9 +14,7 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


-__all__ = (
-"create_variant_repo",
-)
+__all__ = ("create_variant_repo",)


 import copy
@@ -56,18 +54,18 @@ class CreaterepoPhase(PhaseBase):
 def validate(self):
 errors = []

-if not self.compose.old_composes and self.compose.conf.get('createrepo_deltas'):
-errors.append('Can not generate deltas without old compose')
+if not self.compose.old_composes and self.compose.conf.get("createrepo_deltas"):
+errors.append("Can not generate deltas without old compose")

 if errors:
-raise ValueError('\n'.join(errors))
+raise ValueError("\n".join(errors))

 def run(self):
 get_productids_from_scm(self.compose)
 reference_pkgset = None
 if self.pkgset_phase and self.pkgset_phase.package_sets:
 reference_pkgset = self.pkgset_phase.package_sets[-1]
-for i in range(self.compose.conf['createrepo_num_threads']):
+for i in range(self.compose.conf["createrepo_num_threads"]):
 self.pool.add(
 CreaterepoThread(self.pool, reference_pkgset, self.modules_metadata)
 )
@@ -87,18 +85,34 @@ class CreaterepoPhase(PhaseBase):
 self.modules_metadata.write_modules_metadata()


-def create_variant_repo(compose, arch, variant, pkg_type, pkgset, modules_metadata=None):
+def create_variant_repo(
+compose, arch, variant, pkg_type, pkgset, modules_metadata=None
+):
 types = {
-'rpm': ('binary',
-lambda **kwargs: compose.paths.compose.repository(arch=arch, variant=variant, **kwargs)),
-'srpm': ('source',
-lambda **kwargs: compose.paths.compose.repository(arch='src', variant=variant, **kwargs)),
-'debuginfo': ('debug',
-lambda **kwargs: compose.paths.compose.debug_repository(arch=arch, variant=variant, **kwargs)),
+"rpm": (
+"binary",
+lambda **kwargs: compose.paths.compose.repository(
+arch=arch, variant=variant, **kwargs
+),
+),
+"srpm": (
+"source",
+lambda **kwargs: compose.paths.compose.repository(
+arch="src", variant=variant, **kwargs
+),
+),
+"debuginfo": (
+"debug",
+lambda **kwargs: compose.paths.compose.debug_repository(
+arch=arch, variant=variant, **kwargs
+),
+),
 }

-if variant.is_empty or (arch is None and pkg_type != 'srpm'):
-compose.log_info("[SKIP ] Creating repo (arch: %s, variant: %s): %s" % (arch, variant))
+if variant.is_empty or (arch is None and pkg_type != "srpm"):
+compose.log_info(
+"[SKIP ] Creating repo (arch: %s, variant: %s): %s" % (arch, variant)
+)
 return

 createrepo_c = compose.conf["createrepo_c"]
@@ -128,7 +142,7 @@ def create_variant_repo(compose, arch, variant, pkg_type, pkgset, modules_metada
 compose.log_info("[BEGIN] %s" % msg)

 # We only want delta RPMs for binary repos.
-with_deltas = pkg_type == 'rpm' and _has_deltas(compose, variant, arch)
+with_deltas = pkg_type == "rpm" and _has_deltas(compose, variant, arch)

 rpms = set()
 rpm_nevras = set()
@@ -143,7 +157,7 @@ def create_variant_repo(compose, arch, variant, pkg_type, pkgset, modules_metada
 continue
 for srpm_data in data.values():
 for rpm_nevra, rpm_data in srpm_data.items():
-if types[pkg_type][0] != rpm_data['category']:
+if types[pkg_type][0] != rpm_data["category"]:
 continue
 path = os.path.join(compose.topdir, "compose", rpm_data["path"])
 rel_path = relative_path(path, repo_dir.rstrip("/") + "/")
@@ -151,7 +165,7 @@ def create_variant_repo(compose, arch, variant, pkg_type, pkgset, modules_metada
 rpm_nevras.add(str(rpm_nevra))

 file_list = compose.paths.work.repo_package_list(arch, variant, pkg_type)
-with open(file_list, 'w') as f:
+with open(file_list, "w") as f:
 for rel_path in sorted(rpms):
 f.write("%s\n" % rel_path)

@@ -166,18 +180,25 @@ def create_variant_repo(compose, arch, variant, pkg_type, pkgset, modules_metada
 comps_path = None
 if compose.has_comps and pkg_type == "rpm":
 comps_path = compose.paths.work.comps(arch=arch, variant=variant)
-cmd = repo.get_createrepo_cmd(repo_dir, update=True,
+cmd = repo.get_createrepo_cmd(
+repo_dir,
+update=True,
 database=compose.should_create_yum_database,
 skip_stat=True,
-pkglist=file_list, outputdir=repo_dir,
+pkglist=file_list,
+outputdir=repo_dir,
 workers=compose.conf["createrepo_num_workers"],
-groupfile=comps_path, update_md_path=repo_dir_arch,
+groupfile=comps_path,
+update_md_path=repo_dir_arch,
 checksum=createrepo_checksum,
 deltas=with_deltas,
 oldpackagedirs=old_package_dirs,
-use_xz=compose.conf['createrepo_use_xz'],
-extra_args=compose.conf["createrepo_extra_args"])
-log_file = compose.paths.log.log_file(arch, "createrepo-%s.%s" % (variant, pkg_type))
+use_xz=compose.conf["createrepo_use_xz"],
+extra_args=compose.conf["createrepo_extra_args"],
+)
+log_file = compose.paths.log.log_file(
+arch, "createrepo-%s.%s" % (variant, pkg_type)
+)
 run(cmd, logfile=log_file, show_cmd=True)

 # call modifyrepo to inject productid
@@ -186,12 +207,16 @@ def create_variant_repo(compose, arch, variant, pkg_type, pkgset, modules_metada
 # add product certificate to base (rpm) repo; skip source and debug
 product_id_path = compose.paths.work.product_id(arch, variant)
 if os.path.isfile(product_id_path):
-cmd = repo.get_modifyrepo_cmd(os.path.join(repo_dir, "repodata"), product_id_path, compress_type="gz")
+cmd = repo.get_modifyrepo_cmd(
+os.path.join(repo_dir, "repodata"), product_id_path, compress_type="gz"
+)
 log_file = compose.paths.log.log_file(arch, "modifyrepo-%s" % variant)
 run(cmd, logfile=log_file, show_cmd=True)
 # productinfo is not supported by modifyrepo in any way
 # this is a HACK to make CDN happy (dmach: at least I think, need to confirm with dgregor)
-shutil.copy2(product_id_path, os.path.join(repo_dir, "repodata", "productid"))
+shutil.copy2(
+product_id_path, os.path.join(repo_dir, "repodata", "productid")
+)

 # call modifyrepo to inject modulemd if needed
 if pkg_type == "rpm" and arch in variant.arch_mmds and Modulemd is not None:
@@ -217,7 +242,7 @@ def create_variant_repo(compose, arch, variant, pkg_type, pkgset, modules_metada
 for module_id, module_rpms in metadata:
 modulemd_path = os.path.join(
 types[pkg_type][1](relative=True),
-find_file_in_repodata(repo_dir, 'modules'),
+find_file_in_repodata(repo_dir, "modules"),
 )
 modules_metadata.prepare_module_metadata(
 variant,
@@ -246,18 +271,18 @@ def add_modular_metadata(repo, repo_path, mod_index, log_file):
 os.path.join(repo_path, "repodata"),
 modules_path,
 mdtype="modules",
-compress_type="gz"
+compress_type="gz",
 )
 run(cmd, logfile=log_file, show_cmd=True)


 def find_file_in_repodata(repo_path, type_):
-dom = xml.dom.minidom.parse(os.path.join(repo_path, 'repodata', 'repomd.xml'))
-for entry in dom.getElementsByTagName('data'):
-if entry.getAttribute('type') == type_:
-return entry.getElementsByTagName('location')[0].getAttribute('href')
+dom = xml.dom.minidom.parse(os.path.join(repo_path, "repodata", "repomd.xml"))
+for entry in dom.getElementsByTagName("data"):
+if entry.getAttribute("type") == type_:
+return entry.getElementsByTagName("location")[0].getAttribute("href")
 entry.unlink()
-raise RuntimeError('No such file in repodata: %s' % type_)
+raise RuntimeError("No such file in repodata: %s" % type_)


 class CreaterepoThread(WorkerThread):
@@ -274,7 +299,7 @@ class CreaterepoThread(WorkerThread):
 variant,
 pkg_type=pkg_type,
 pkgset=self.reference_pkgset,
-modules_metadata=self.modules_metadata
+modules_metadata=self.modules_metadata,
 )


@@ -308,7 +333,8 @@ def get_productids_from_scm(compose):
 # pem_files = glob.glob("%s/*.pem" % tmp_dir)[-1:]
 if not pem_files:
 warning = "No product certificate found (arch: %s, variant: %s)" % (
-arch, variant.uid
+arch,
+variant.uid,
 )
 if product_id_allow_missing:
 compose.log_warning(warning)
@@ -318,7 +344,14 @@ def get_productids_from_scm(compose):
 raise RuntimeError(warning)
 if len(pem_files) > 1:
 shutil.rmtree(tmp_dir)
-raise RuntimeError("Multiple product certificates found (arch: %s, variant: %s): %s" % (arch, variant.uid, ", ".join(sorted([os.path.basename(i) for i in pem_files]))))
+raise RuntimeError(
+"Multiple product certificates found (arch: %s, variant: %s): %s"
+% (
+arch,
+variant.uid,
+", ".join(sorted([os.path.basename(i) for i in pem_files])),
+)
+)
 product_id_path = compose.paths.work.product_id(arch, variant)
 shutil.copy2(pem_files[0], product_id_path)

@@ -331,23 +364,27 @@ def _get_old_package_dirs(compose, repo_dir):
 repo in an older compose and return a list of paths to directories with
 packages in it.
 """
-if not compose.conf['createrepo_deltas']:
+if not compose.conf["createrepo_deltas"]:
 return None
 old_compose_path = find_old_compose(
 compose.old_composes,
 compose.ci_base.release.short,
 compose.ci_base.release.version,
 compose.ci_base.release.type_suffix,
-compose.ci_base.base_product.short if compose.ci_base.release.is_layered else None,
-compose.ci_base.base_product.version if compose.ci_base.release.is_layered else None,
-allowed_statuses=['FINISHED', 'FINISHED_INCOMPLETE'],
+compose.ci_base.base_product.short
+if compose.ci_base.release.is_layered
+else None,
+compose.ci_base.base_product.version
+if compose.ci_base.release.is_layered
+else None,
+allowed_statuses=["FINISHED", "FINISHED_INCOMPLETE"],
 )
 if not old_compose_path:
 compose.log_info("No suitable old compose found in: %s" % compose.old_composes)
 return None
-rel_dir = relative_path(repo_dir, compose.topdir.rstrip('/') + '/')
-old_package_dirs = os.path.join(old_compose_path, rel_dir, 'Packages')
-if compose.conf['hashed_directories']:
+rel_dir = relative_path(repo_dir, compose.topdir.rstrip("/") + "/")
+old_package_dirs = os.path.join(old_compose_path, rel_dir, "Packages")
+if compose.conf["hashed_directories"]:
 old_package_dirs = _find_package_dirs(old_package_dirs)
 return old_package_dirs

@@ -370,7 +407,7 @@ def _find_package_dirs(base):

 def _has_deltas(compose, variant, arch):
 """Check if delta RPMs are enabled for given variant and architecture."""
-key = 'createrepo_deltas'
+key = "createrepo_deltas"
 if isinstance(compose.conf.get(key), bool):
 return compose.conf[key]
 return any(get_arch_variant_data(compose.conf, key, arch, variant))
@@ -383,18 +420,28 @@ class ModulesMetadata(object):
 self.modules_metadata_file = self.compose.paths.compose.metadata("modules.json")
 self.productmd_modules_metadata = productmd.modules.Modules()
 self.productmd_modules_metadata.compose.id = copy.copy(self.compose.compose_id)
-self.productmd_modules_metadata.compose.type = copy.copy(self.compose.compose_type)
-self.productmd_modules_metadata.compose.date = copy.copy(self.compose.compose_date)
-self.productmd_modules_metadata.compose.respin = copy.copy(self.compose.compose_respin)
+self.productmd_modules_metadata.compose.type = copy.copy(
+self.compose.compose_type
+)
+self.productmd_modules_metadata.compose.date = copy.copy(
+self.compose.compose_date
+)
+self.productmd_modules_metadata.compose.respin = copy.copy(
+self.compose.compose_respin
+)

 def write_modules_metadata(self):
 """
 flush modules metadata into file
 """
-self.compose.log_info("Writing modules metadata: %s" % self.modules_metadata_file)
+self.compose.log_info(
+"Writing modules metadata: %s" % self.modules_metadata_file
+)
 self.productmd_modules_metadata.dump(self.modules_metadata_file)

-def prepare_module_metadata(self, variant, arch, nsvc, modulemd_path, category, module_rpms):
+def prepare_module_metadata(
+self, variant, arch, nsvc, modulemd_path, category, module_rpms
+):
 """
 Find koji tag which corresponds to the module and add record into
 module metadata structure.

@@ -29,6 +29,7 @@ from pungi import metadata

 class ExtraFilesPhase(ConfigGuardedPhase):
 """EXTRA_FILES"""
+
 name = "extra_files"

 def __init__(self, compose, pkgset_phase):
@@ -58,8 +59,10 @@ class ExtraFilesPhase(ConfigGuardedPhase):
 self.metadata,
 )
 else:
-self.compose.log_info('[SKIP ] No extra files (arch: %s, variant: %s)'
-% (arch, variant.uid))
+self.compose.log_info(
+"[SKIP ] No extra files (arch: %s, variant: %s)"
+% (arch, variant.uid)
+)

 metadata_path = self.compose.paths.compose.metadata("extra_files.json")
 self.compose.log_info("Writing global extra files metadata: %s" % metadata_path)
@@ -69,7 +72,7 @@ class ExtraFilesPhase(ConfigGuardedPhase):
 def copy_extra_files(
 compose, cfg, arch, variant, package_sets, extra_metadata, checksum_type=None
 ):
-checksum_type = checksum_type or compose.conf['media_checksums']
+checksum_type = checksum_type or compose.conf["media_checksums"]
 var_dict = {
 "arch": arch,
 "variant_id": variant.id,
@@ -95,14 +98,20 @@ def copy_extra_files(
 for package_set in package_sets:
 for pkgset_file in package_set[arch]:
 pkg_obj = package_set[arch][pkgset_file]
-if pkg_is_rpm(pkg_obj) and _pkg_matches(pkg_obj, pkg_name, pkg_arch):
+if pkg_is_rpm(pkg_obj) and _pkg_matches(
+pkg_obj, pkg_name, pkg_arch
+):
 rpms.append(pkg_obj.file_path)
 if not rpms:
-raise RuntimeError('No package matching %s in the package set.' % pattern)
+raise RuntimeError(
+"No package matching %s in the package set." % pattern
+)
 scm_dict["repo"] = rpms

-getter = get_file_from_scm if 'file' in scm_dict else get_dir_from_scm
-target_path = os.path.join(extra_files_dir, scm_dict.get('target', '').lstrip('/'))
+getter = get_file_from_scm if "file" in scm_dict else get_dir_from_scm
+target_path = os.path.join(
+extra_files_dir, scm_dict.get("target", "").lstrip("/")
+)
 getter(scm_dict, target_path, compose=compose)

 if os.listdir(extra_files_dir):
@ -121,11 +130,12 @@ def copy_extra_files(
|
|||||||
|
|
||||||
def _pkg_matches(pkg_obj, name_glob, arch):
|
def _pkg_matches(pkg_obj, name_glob, arch):
|
||||||
"""Check if `pkg_obj` matches name and arch."""
|
"""Check if `pkg_obj` matches name and arch."""
|
||||||
return (fnmatch.fnmatch(pkg_obj.name, name_glob) and
|
return fnmatch.fnmatch(pkg_obj.name, name_glob) and (
|
||||||
(arch is None or arch == pkg_obj.arch))
|
arch is None or arch == pkg_obj.arch
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _is_external(rpm):
|
def _is_external(rpm):
|
||||||
"""Check if path to rpm points outside of the compose: i.e. it is an
|
"""Check if path to rpm points outside of the compose: i.e. it is an
|
||||||
absolute path or a URL."""
|
absolute path or a URL."""
|
||||||
return rpm.startswith('/') or '://' in rpm
|
return rpm.startswith("/") or "://" in rpm
|
||||||
|
@@ -23,8 +23,12 @@ from productmd.extra_files import ExtraFiles
 from pungi import createiso
 from pungi import metadata
 from pungi.phases.base import ConfigGuardedPhase, PhaseBase, PhaseLoggerMixin
-from pungi.phases.createiso import (add_iso_to_metadata, copy_boot_images,
-                                    run_createiso_command, load_and_tweak_treeinfo)
+from pungi.phases.createiso import (
+    add_iso_to_metadata,
+    copy_boot_images,
+    run_createiso_command,
+    load_and_tweak_treeinfo,
+)
 from pungi.util import failable, get_format_substs, get_variant_data, get_volid
 from pungi.wrappers import iso
 from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm
@@ -38,24 +42,25 @@ class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
         self.pool = ThreadPool(logger=self.logger)
 
     def validate(self):
-        for variant in self.compose.get_variants(types=['variant']):
+        for variant in self.compose.get_variants(types=["variant"]):
             for config in get_variant_data(self.compose.conf, self.name, variant):
-                extra_arches = set(config.get('arches', [])) - set(variant.arches)
+                extra_arches = set(config.get("arches", [])) - set(variant.arches)
                 if extra_arches:
                     self.compose.log_warning(
-                        'Extra iso config for %s mentions non-existing arches: %s'
-                        % (variant, ', '.join(sorted(extra_arches))))
+                        "Extra iso config for %s mentions non-existing arches: %s"
+                        % (variant, ", ".join(sorted(extra_arches)))
+                    )
 
     def run(self):
         commands = []
 
-        for variant in self.compose.get_variants(types=['variant']):
+        for variant in self.compose.get_variants(types=["variant"]):
             for config in get_variant_data(self.compose.conf, self.name, variant):
                 arches = set(variant.arches)
-                if config.get('arches'):
-                    arches &= set(config['arches'])
-                if not config['skip_src']:
-                    arches.add('src')
+                if config.get("arches"):
+                    arches &= set(config["arches"])
+                if not config["skip_src"]:
+                    arches.add("src")
                 for arch in sorted(arches):
                     commands.append((config, variant, arch))
 
@@ -70,13 +75,15 @@ class ExtraIsosThread(WorkerThread):
     def process(self, item, num):
         self.num = num
         compose, config, variant, arch = item
-        can_fail = arch in config.get('failable_arches', [])
-        with failable(compose, can_fail, variant, arch, 'extra_iso', logger=self.pool._logger):
+        can_fail = arch in config.get("failable_arches", [])
+        with failable(
+            compose, can_fail, variant, arch, "extra_iso", logger=self.pool._logger
+        ):
             self.worker(compose, config, variant, arch)
 
     def worker(self, compose, config, variant, arch):
-        filename = get_filename(compose, variant, arch, config.get('filename'))
-        volid = get_volume_id(compose, variant, arch, config.get('volid', []))
+        filename = get_filename(compose, variant, arch, config.get("filename"))
+        volid = get_volume_id(compose, variant, arch, config.get("volid", []))
         iso_dir = compose.paths.compose.iso_dir(arch, variant)
         iso_path = os.path.join(iso_dir, filename)
 
@ -85,15 +92,15 @@ class ExtraIsosThread(WorkerThread):
|
|||||||
msg = "Creating ISO (arch: %s, variant: %s): %s" % (arch, variant, filename)
|
msg = "Creating ISO (arch: %s, variant: %s): %s" % (arch, variant, filename)
|
||||||
self.pool.log_info("[BEGIN] %s" % msg)
|
self.pool.log_info("[BEGIN] %s" % msg)
|
||||||
|
|
||||||
get_extra_files(compose, variant, arch, config.get('extra_files', []))
|
get_extra_files(compose, variant, arch, config.get("extra_files", []))
|
||||||
|
|
||||||
bootable = arch != "src" and bool(compose.conf.get('buildinstall_method'))
|
bootable = arch != "src" and bool(compose.conf.get("buildinstall_method"))
|
||||||
|
|
||||||
graft_points = get_iso_contents(
|
graft_points = get_iso_contents(
|
||||||
compose,
|
compose,
|
||||||
variant,
|
variant,
|
||||||
arch,
|
arch,
|
||||||
config['include_variants'],
|
config["include_variants"],
|
||||||
filename,
|
filename,
|
||||||
bootable=bootable,
|
bootable=bootable,
|
||||||
inherit_extra_files=config.get("inherit_extra_files", False),
|
inherit_extra_files=config.get("inherit_extra_files", False),
|
||||||
@ -108,24 +115,34 @@ class ExtraIsosThread(WorkerThread):
|
|||||||
supported=compose.supported,
|
supported=compose.supported,
|
||||||
hfs_compat=compose.conf["iso_hfs_ppc64le_compatible"],
|
hfs_compat=compose.conf["iso_hfs_ppc64le_compatible"],
|
||||||
)
|
)
|
||||||
if compose.conf['create_jigdo']:
|
if compose.conf["create_jigdo"]:
|
||||||
jigdo_dir = compose.paths.compose.jigdo_dir(arch, variant)
|
jigdo_dir = compose.paths.compose.jigdo_dir(arch, variant)
|
||||||
os_tree = compose.paths.compose.os_tree(arch, variant)
|
os_tree = compose.paths.compose.os_tree(arch, variant)
|
||||||
opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)
|
opts = opts._replace(jigdo_dir=jigdo_dir, os_tree=os_tree)
|
||||||
|
|
||||||
if bootable:
|
if bootable:
|
||||||
opts = opts._replace(buildinstall_method=compose.conf['buildinstall_method'])
|
opts = opts._replace(
|
||||||
|
buildinstall_method=compose.conf["buildinstall_method"]
|
||||||
|
)
|
||||||
|
|
||||||
script_file = os.path.join(compose.paths.work.tmp_dir(arch, variant),
|
script_file = os.path.join(
|
||||||
'extraiso-%s.sh' % filename)
|
compose.paths.work.tmp_dir(arch, variant), "extraiso-%s.sh" % filename
|
||||||
with open(script_file, 'w') as f:
|
)
|
||||||
|
with open(script_file, "w") as f:
|
||||||
createiso.write_script(opts, f)
|
createiso.write_script(opts, f)
|
||||||
|
|
||||||
run_createiso_command(self.num, compose, bootable, arch,
|
run_createiso_command(
|
||||||
['bash', script_file], [compose.topdir],
|
self.num,
|
||||||
|
compose,
|
||||||
|
bootable,
|
||||||
|
arch,
|
||||||
|
["bash", script_file],
|
||||||
|
[compose.topdir],
|
||||||
log_file=compose.paths.log.log_file(
|
log_file=compose.paths.log.log_file(
|
||||||
arch, "extraiso-%s" % os.path.basename(iso_path)),
|
arch, "extraiso-%s" % os.path.basename(iso_path)
|
||||||
with_jigdo=compose.conf['create_jigdo'])
|
),
|
||||||
|
with_jigdo=compose.conf["create_jigdo"],
|
||||||
|
)
|
||||||
|
|
||||||
img = add_iso_to_metadata(
|
img = add_iso_to_metadata(
|
||||||
compose,
|
compose,
|
||||||
@ -147,7 +164,7 @@ def get_extra_files(compose, variant, arch, extra_files):
|
|||||||
extra_files_dir = compose.paths.work.extra_iso_extra_files_dir(arch, variant)
|
extra_files_dir = compose.paths.work.extra_iso_extra_files_dir(arch, variant)
|
||||||
filelist = []
|
filelist = []
|
||||||
for scm_dict in extra_files:
|
for scm_dict in extra_files:
|
||||||
getter = get_file_from_scm if 'file' in scm_dict else get_dir_from_scm
|
getter = get_file_from_scm if "file" in scm_dict else get_dir_from_scm
|
||||||
target = scm_dict.get("target", "").lstrip("/")
|
target = scm_dict.get("target", "").lstrip("/")
|
||||||
target_path = os.path.join(extra_files_dir, target).rstrip("/")
|
target_path = os.path.join(extra_files_dir, target).rstrip("/")
|
||||||
filelist.extend(
|
filelist.extend(
|
||||||
@ -178,7 +195,7 @@ def get_iso_contents(
|
|||||||
files = {}
|
files = {}
|
||||||
if bootable:
|
if bootable:
|
||||||
buildinstall_dir = compose.paths.work.buildinstall_dir(arch, create_dir=False)
|
buildinstall_dir = compose.paths.work.buildinstall_dir(arch, create_dir=False)
|
||||||
if compose.conf['buildinstall_method'] == 'lorax':
|
if compose.conf["buildinstall_method"] == "lorax":
|
||||||
buildinstall_dir = os.path.join(buildinstall_dir, variant.uid)
|
buildinstall_dir = os.path.join(buildinstall_dir, variant.uid)
|
||||||
|
|
||||||
copy_boot_images(buildinstall_dir, iso_dir)
|
copy_boot_images(buildinstall_dir, iso_dir)
|
||||||
@ -199,13 +216,13 @@ def get_iso_contents(
|
|||||||
# Get packages...
|
# Get packages...
|
||||||
package_dir = compose.paths.compose.packages(arch, var)
|
package_dir = compose.paths.compose.packages(arch, var)
|
||||||
for k, v in iso.get_graft_points(compose, [package_dir]).items():
|
for k, v in iso.get_graft_points(compose, [package_dir]).items():
|
||||||
files[os.path.join(var.uid, 'Packages', k)] = v
|
files[os.path.join(var.uid, "Packages", k)] = v
|
||||||
|
|
||||||
# Get repodata...
|
# Get repodata...
|
||||||
tree_dir = compose.paths.compose.repository(arch, var)
|
tree_dir = compose.paths.compose.repository(arch, var)
|
||||||
repo_dir = os.path.join(tree_dir, 'repodata')
|
repo_dir = os.path.join(tree_dir, "repodata")
|
||||||
for k, v in iso.get_graft_points(compose, [repo_dir]).items():
|
for k, v in iso.get_graft_points(compose, [repo_dir]).items():
|
||||||
files[os.path.join(var.uid, 'repodata', k)] = v
|
files[os.path.join(var.uid, "repodata", k)] = v
|
||||||
|
|
||||||
if inherit_extra_files:
|
if inherit_extra_files:
|
||||||
# Get extra files...
|
# Get extra files...
|
||||||
@@ -253,32 +270,41 @@ def tweak_treeinfo(compose, include_variants, source_file, dest_file):
 
 
 def get_filename(compose, variant, arch, format):
-    disc_type = compose.conf['disc_types'].get('dvd', 'dvd')
+    disc_type = compose.conf["disc_types"].get("dvd", "dvd")
     base_filename = compose.get_image_name(
-        arch, variant, disc_type=disc_type, disc_num=1)
+        arch, variant, disc_type=disc_type, disc_num=1
+    )
     if not format:
         return base_filename
     kwargs = {
-        'arch': arch,
-        'disc_type': disc_type,
-        'disc_num': 1,
-        'suffix': '.iso',
-        'filename': base_filename,
-        'variant': variant,
+        "arch": arch,
+        "disc_type": disc_type,
+        "disc_num": 1,
+        "suffix": ".iso",
+        "filename": base_filename,
+        "variant": variant,
     }
     args = get_format_substs(compose, **kwargs)
     try:
         return (format % args).format(**args)
     except KeyError as err:
-        raise RuntimeError('Failed to create image name: unknown format element: %s' % err)
+        raise RuntimeError(
+            "Failed to create image name: unknown format element: %s" % err
+        )
 
 
 def get_volume_id(compose, variant, arch, formats):
-    disc_type = compose.conf['disc_types'].get('dvd', 'dvd')
+    disc_type = compose.conf["disc_types"].get("dvd", "dvd")
     # Get volume ID for regular ISO so that we can substitute it in.
     volid = get_volid(compose, arch, variant, disc_type=disc_type)
-    return get_volid(compose, arch, variant, disc_type=disc_type,
-                     formats=force_list(formats), volid=volid)
+    return get_volid(
+        compose,
+        arch,
+        variant,
+        disc_type=disc_type,
+        formats=force_list(formats),
+        volid=volid,
+    )
 
 
 def prepare_media_metadata(compose, variant, arch):
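
The extra_isos hunks above are typical of the whole commit: black normalizes string literals to double quotes and re-wraps any statement that would exceed its default 88-character limit, exploding long calls into one argument per line with a trailing comma. A minimal sketch of reproducing the same rewrite programmatically, assuming black's Python API (format_str and FileMode; the exact signatures may differ between black releases), with the source snippet taken from one of the hunks above:

import black

# One of the over-long lines as it looked before the commit.
source = (
    "self.compose.log_info('[SKIP ] No extra files (arch: %s, variant: %s)'"
    " % (arch, variant.uid))\n"
)
# format_str() applies the same rules as running `black .` on the tree:
# double quotes, 88-character lines, exploded call arguments.
print(black.format_str(source, mode=black.FileMode()))
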
@@ -22,6 +22,7 @@ import threading
 from kobo.rpmlib import parse_nvra
 from kobo.shortcuts import run
 from productmd.rpms import Rpms
+
 try:
     from queue import Queue
 except ImportError:
@@ -35,8 +36,7 @@ import pungi.wrappers.kojiwrapper
 from pungi.compose import get_ordered_variant_uids
 from pungi.arch import get_compatible_arches, split_name_arch
 from pungi.phases.base import PhaseBase
-from pungi.util import (get_arch_data, get_arch_variant_data, get_variant_data,
-                        makedirs)
+from pungi.util import get_arch_data, get_arch_variant_data, get_variant_data, makedirs
 from pungi.module_util import Modulemd, collect_module_defaults
 from pungi.phases.createrepo import add_modular_metadata
 
@ -44,6 +44,7 @@ from pungi.phases.createrepo import add_modular_metadata
|
|||||||
def get_gather_source(name):
|
def get_gather_source(name):
|
||||||
import pungi.phases.gather.sources
|
import pungi.phases.gather.sources
|
||||||
from .source import GatherSourceContainer
|
from .source import GatherSourceContainer
|
||||||
|
|
||||||
GatherSourceContainer.register_module(pungi.phases.gather.sources)
|
GatherSourceContainer.register_module(pungi.phases.gather.sources)
|
||||||
container = GatherSourceContainer()
|
container = GatherSourceContainer()
|
||||||
return container["GatherSource%s" % name]
|
return container["GatherSource%s" % name]
|
||||||
@ -52,6 +53,7 @@ def get_gather_source(name):
|
|||||||
def get_gather_method(name):
|
def get_gather_method(name):
|
||||||
import pungi.phases.gather.methods
|
import pungi.phases.gather.methods
|
||||||
from .method import GatherMethodContainer
|
from .method import GatherMethodContainer
|
||||||
|
|
||||||
GatherMethodContainer.register_module(pungi.phases.gather.methods)
|
GatherMethodContainer.register_module(pungi.phases.gather.methods)
|
||||||
container = GatherMethodContainer()
|
container = GatherMethodContainer()
|
||||||
return container["GatherMethod%s" % name]
|
return container["GatherMethod%s" % name]
|
||||||
@ -59,6 +61,7 @@ def get_gather_method(name):
|
|||||||
|
|
||||||
class GatherPhase(PhaseBase):
|
class GatherPhase(PhaseBase):
|
||||||
"""GATHER"""
|
"""GATHER"""
|
||||||
|
|
||||||
name = "gather"
|
name = "gather"
|
||||||
|
|
||||||
def __init__(self, compose, pkgset_phase):
|
def __init__(self, compose, pkgset_phase):
|
||||||
@@ -80,7 +83,7 @@ class GatherPhase(PhaseBase):
             # Modules are not supported, check if we need them
             for variant in self.compose.variants.values():
                 if variant.modules:
-                    errors.append('Modular compose requires libmodulemd package.')
+                    errors.append("Modular compose requires libmodulemd package.")
 
         # check whether variants from configuration value 'variant_as_lookaside' are correct
         variant_as_lookaside = self.compose.conf.get("variant_as_lookaside", [])
@ -93,25 +96,30 @@ class GatherPhase(PhaseBase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if errors:
|
if errors:
|
||||||
raise ValueError('\n'.join(errors))
|
raise ValueError("\n".join(errors))
|
||||||
|
|
||||||
def _write_manifest(self):
|
def _write_manifest(self):
|
||||||
self.compose.log_info("Writing RPM manifest: %s" % self.manifest_file)
|
self.compose.log_info("Writing RPM manifest: %s" % self.manifest_file)
|
||||||
self.manifest.dump(self.manifest_file)
|
self.manifest.dump(self.manifest_file)
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
pkg_map = gather_wrapper(self.compose, self.pkgset_phase.package_sets,
|
pkg_map = gather_wrapper(
|
||||||
self.pkgset_phase.path_prefix)
|
self.compose, self.pkgset_phase.package_sets, self.pkgset_phase.path_prefix
|
||||||
|
)
|
||||||
|
|
||||||
for variant_uid in get_ordered_variant_uids(self.compose):
|
for variant_uid in get_ordered_variant_uids(self.compose):
|
||||||
variant = self.compose.all_variants[variant_uid]
|
variant = self.compose.all_variants[variant_uid]
|
||||||
if variant.is_empty:
|
if variant.is_empty:
|
||||||
continue
|
continue
|
||||||
for arch in variant.arches:
|
for arch in variant.arches:
|
||||||
link_files(self.compose, arch, variant,
|
link_files(
|
||||||
|
self.compose,
|
||||||
|
arch,
|
||||||
|
variant,
|
||||||
pkg_map[arch][variant.uid],
|
pkg_map[arch][variant.uid],
|
||||||
self.pkgset_phase.package_sets,
|
self.pkgset_phase.package_sets,
|
||||||
manifest=self.manifest)
|
manifest=self.manifest,
|
||||||
|
)
|
||||||
|
|
||||||
self._write_manifest()
|
self._write_manifest()
|
||||||
|
|
||||||
@ -148,10 +156,12 @@ def get_gather_methods(compose, variant):
|
|||||||
global_method_name = methods
|
global_method_name = methods
|
||||||
if isinstance(methods, dict):
|
if isinstance(methods, dict):
|
||||||
try:
|
try:
|
||||||
methods = get_variant_data(compose.conf, 'gather_method', variant)[-1]
|
methods = get_variant_data(compose.conf, "gather_method", variant)[-1]
|
||||||
global_method_name = None
|
global_method_name = None
|
||||||
except IndexError:
|
except IndexError:
|
||||||
raise RuntimeError("Variant %s has no configured gather_method" % variant.uid)
|
raise RuntimeError(
|
||||||
|
"Variant %s has no configured gather_method" % variant.uid
|
||||||
|
)
|
||||||
return global_method_name, methods
|
return global_method_name, methods
|
||||||
|
|
||||||
|
|
||||||
@ -208,8 +218,9 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
|
|||||||
|
|
||||||
for source_name in ("module", "comps", "json"):
|
for source_name in ("module", "comps", "json"):
|
||||||
|
|
||||||
packages, groups, filter_packages = get_variant_packages(compose, arch, variant,
|
packages, groups, filter_packages = get_variant_packages(
|
||||||
source_name, package_sets)
|
compose, arch, variant, source_name, package_sets
|
||||||
|
)
|
||||||
if not packages and not groups:
|
if not packages and not groups:
|
||||||
# No inputs, nothing to do really.
|
# No inputs, nothing to do really.
|
||||||
continue
|
continue
|
||||||
@ -217,20 +228,32 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
|
|||||||
try:
|
try:
|
||||||
method_name = global_method_name or methods[source_name]
|
method_name = global_method_name or methods[source_name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise RuntimeError("Variant %s has no configured gather_method for source %s"
|
raise RuntimeError(
|
||||||
% (variant.uid, source_name))
|
"Variant %s has no configured gather_method for source %s"
|
||||||
|
% (variant.uid, source_name)
|
||||||
|
)
|
||||||
|
|
||||||
GatherMethod = get_gather_method(method_name)
|
GatherMethod = get_gather_method(method_name)
|
||||||
method = GatherMethod(compose)
|
method = GatherMethod(compose)
|
||||||
method.source_name = source_name
|
method.source_name = source_name
|
||||||
compose.log_debug(
|
compose.log_debug(
|
||||||
"Gathering source %s, method %s (arch: %s, variant: %s)" % (source_name, method_name, arch, variant))
|
"Gathering source %s, method %s (arch: %s, variant: %s)"
|
||||||
pkg_map = method(arch, variant, packages, groups, filter_packages,
|
% (source_name, method_name, arch, variant)
|
||||||
multilib_whitelist, multilib_blacklist, package_sets,
|
)
|
||||||
|
pkg_map = method(
|
||||||
|
arch,
|
||||||
|
variant,
|
||||||
|
packages,
|
||||||
|
groups,
|
||||||
|
filter_packages,
|
||||||
|
multilib_whitelist,
|
||||||
|
multilib_blacklist,
|
||||||
|
package_sets,
|
||||||
fulltree_excludes=fulltree_excludes,
|
fulltree_excludes=fulltree_excludes,
|
||||||
prepopulate=prepopulate if source_name == 'comps' else set())
|
prepopulate=prepopulate if source_name == "comps" else set(),
|
||||||
|
)
|
||||||
|
|
||||||
for t in ('rpm', 'srpm', 'debuginfo'):
|
for t in ("rpm", "srpm", "debuginfo"):
|
||||||
result[t].extend(pkg_map.get(t, []))
|
result[t].extend(pkg_map.get(t, []))
|
||||||
|
|
||||||
compose.log_info("[DONE ] %s" % msg)
|
compose.log_info("[DONE ] %s" % msg)
|
||||||
@@ -246,13 +269,15 @@ def write_packages(compose, arch, variant, pkg_map, path_prefix):
     compose.log_info("[BEGIN] %s" % msg)
 
     for pkg_type, pkgs in pkg_map.items():
-        file_name = compose.paths.work.package_list(arch=arch, variant=variant, pkg_type=pkg_type)
+        file_name = compose.paths.work.package_list(
+            arch=arch, variant=variant, pkg_type=pkg_type
+        )
         with open(file_name, "w") as pkg_list:
             for pkg in pkgs:
                 # TODO: flags?
                 pkg_path = pkg["path"]
                 if pkg_path.startswith(path_prefix):
-                    pkg_path = pkg_path[len(path_prefix):]
+                    pkg_path = pkg_path[len(path_prefix) :]
                 pkg_list.write("%s\n" % pkg_path)
 
     compose.log_info("[DONE ] %s" % msg)
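
One change in the write_packages hunk above looks like a typo but is not: pkg_path[len(path_prefix):] becomes pkg_path[len(path_prefix) :]. When a slice bound is a more complex expression (a call, arithmetic, and so on), black pads the colon with spaces, following PEP 8's advice to treat it as the lowest-priority operator; simple bounds keep the tight form. This is also the pattern flake8 reports as E203, which black's documentation suggests ignoring when both tools are used. A small illustration with hypothetical values, not taken from the compose code:

prefix = "/mnt/koji/"
path = "/mnt/koji/packages/pungi/4.2.0/1.fc32/src/pungi-4.2.0-1.fc32.src.rpm"

rel_simple = path[1:]              # simple bound: black keeps the tight form
rel_complex = path[len(prefix) :]  # complex bound: black pads the colon
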
@ -299,18 +324,23 @@ def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs
|
|||||||
if not pkg_path:
|
if not pkg_path:
|
||||||
continue
|
continue
|
||||||
nvra = parse_nvra(pkg_path)
|
nvra = parse_nvra(pkg_path)
|
||||||
key = ((nvra["name"], nvra["arch"]))
|
key = (nvra["name"], nvra["arch"])
|
||||||
|
|
||||||
if nvra["name"] in remove_pkgs.get(pkg_type, set()):
|
if nvra["name"] in remove_pkgs.get(pkg_type, set()):
|
||||||
# TODO: make an option to turn this off
|
# TODO: make an option to turn this off
|
||||||
if variant.type == "layered-product" and pkg_type in ("srpm", "debuginfo"):
|
if variant.type == "layered-product" and pkg_type in (
|
||||||
|
"srpm",
|
||||||
|
"debuginfo",
|
||||||
|
):
|
||||||
new_pkgs.append(pkg)
|
new_pkgs.append(pkg)
|
||||||
# User may not have addons available, therefore we need to
|
# User may not have addons available, therefore we need to
|
||||||
# keep addon SRPMs in layered products in order not to violate GPL.
|
# keep addon SRPMs in layered products in order not to violate GPL.
|
||||||
# The same applies on debuginfo availability.
|
# The same applies on debuginfo availability.
|
||||||
continue
|
continue
|
||||||
compose.log_warning("Removed addon package (arch: %s, variant: %s): %s: %s" % (
|
compose.log_warning(
|
||||||
arch, variant, pkg_type, pkg_path))
|
"Removed addon package (arch: %s, variant: %s): %s: %s"
|
||||||
|
% (arch, variant, pkg_type, pkg_path)
|
||||||
|
)
|
||||||
removed_pkgs[pkg_type].append(pkg)
|
removed_pkgs[pkg_type].append(pkg)
|
||||||
elif key not in parent_pkgs.get(pkg_type, set()):
|
elif key not in parent_pkgs.get(pkg_type, set()):
|
||||||
if "fulltree-exclude" in pkg["flags"] and "input" not in pkg["flags"]:
|
if "fulltree-exclude" in pkg["flags"] and "input" not in pkg["flags"]:
|
||||||
@ -326,10 +356,14 @@ def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs
|
|||||||
removed_pkgs[pkg_type].append(pkg)
|
removed_pkgs[pkg_type].append(pkg)
|
||||||
|
|
||||||
pkg_map[pkg_type] = new_pkgs
|
pkg_map[pkg_type] = new_pkgs
|
||||||
compose.log_info("Removed packages (arch: %s, variant: %s): %s: %s" % (
|
compose.log_info(
|
||||||
arch, variant, pkg_type, len(removed_pkgs[pkg_type])))
|
"Removed packages (arch: %s, variant: %s): %s: %s"
|
||||||
compose.log_info("Moved to parent (arch: %s, variant: %s): %s: %s" % (
|
% (arch, variant, pkg_type, len(removed_pkgs[pkg_type]))
|
||||||
arch, variant, pkg_type, len(move_to_parent_pkgs[pkg_type])))
|
)
|
||||||
|
compose.log_info(
|
||||||
|
"Moved to parent (arch: %s, variant: %s): %s: %s"
|
||||||
|
% (arch, variant, pkg_type, len(move_to_parent_pkgs[pkg_type]))
|
||||||
|
)
|
||||||
|
|
||||||
compose.log_info("[DONE ] %s" % msg)
|
compose.log_info("[DONE ] %s" % msg)
|
||||||
return addon_pkgs, move_to_parent_pkgs, removed_pkgs
|
return addon_pkgs, move_to_parent_pkgs, removed_pkgs
|
||||||
@ -347,39 +381,50 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
|
|||||||
return repo
|
return repo
|
||||||
|
|
||||||
makedirs(repo)
|
makedirs(repo)
|
||||||
msg = 'Generating lookaside repo from %s.%s' % (variant.uid, arch)
|
msg = "Generating lookaside repo from %s.%s" % (variant.uid, arch)
|
||||||
compose.log_info('[BEGIN] %s', msg)
|
compose.log_info("[BEGIN] %s", msg)
|
||||||
|
|
||||||
prefixes = {
|
prefixes = {
|
||||||
'repos': lambda: os.path.join(compose.paths.work.topdir(
|
"repos": lambda: os.path.join(
|
||||||
arch="global"), "download") + "/",
|
compose.paths.work.topdir(arch="global"), "download"
|
||||||
'koji': lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
|
)
|
||||||
compose.conf['koji_profile']).koji_module.config.topdir.rstrip("/") + "/"
|
+ "/",
|
||||||
|
"koji": lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
|
||||||
|
compose.conf["koji_profile"]
|
||||||
|
).koji_module.config.topdir.rstrip("/")
|
||||||
|
+ "/",
|
||||||
}
|
}
|
||||||
path_prefix = prefixes[compose.conf['pkgset_source']]()
|
path_prefix = prefixes[compose.conf["pkgset_source"]]()
|
||||||
pkglist = compose.paths.work.lookaside_package_list(arch=arch, variant=variant)
|
pkglist = compose.paths.work.lookaside_package_list(arch=arch, variant=variant)
|
||||||
with open(pkglist, 'w') as f:
|
with open(pkglist, "w") as f:
|
||||||
for packages in pkg_map[arch][variant.uid].values():
|
for packages in pkg_map[arch][variant.uid].values():
|
||||||
for pkg in packages:
|
for pkg in packages:
|
||||||
pkg = pkg['path']
|
pkg = pkg["path"]
|
||||||
if path_prefix and pkg.startswith(path_prefix):
|
if path_prefix and pkg.startswith(path_prefix):
|
||||||
pkg = pkg[len(path_prefix):]
|
pkg = pkg[len(path_prefix) :]
|
||||||
f.write('%s\n' % pkg)
|
f.write("%s\n" % pkg)
|
||||||
|
|
||||||
cr = CreaterepoWrapper(compose.conf['createrepo_c'])
|
cr = CreaterepoWrapper(compose.conf["createrepo_c"])
|
||||||
update_metadata = None
|
update_metadata = None
|
||||||
if package_sets:
|
if package_sets:
|
||||||
pkgset = package_sets[-1]
|
pkgset = package_sets[-1]
|
||||||
update_metadata = compose.paths.work.pkgset_repo(pkgset.name, arch)
|
update_metadata = compose.paths.work.pkgset_repo(pkgset.name, arch)
|
||||||
cmd = cr.get_createrepo_cmd(path_prefix, update=True, database=True, skip_stat=True,
|
cmd = cr.get_createrepo_cmd(
|
||||||
|
path_prefix,
|
||||||
|
update=True,
|
||||||
|
database=True,
|
||||||
|
skip_stat=True,
|
||||||
pkglist=pkglist,
|
pkglist=pkglist,
|
||||||
outputdir=repo,
|
outputdir=repo,
|
||||||
baseurl="file://%s" % path_prefix,
|
baseurl="file://%s" % path_prefix,
|
||||||
workers=compose.conf["createrepo_num_workers"],
|
workers=compose.conf["createrepo_num_workers"],
|
||||||
update_md_path=update_metadata)
|
update_md_path=update_metadata,
|
||||||
run(cmd,
|
)
|
||||||
|
run(
|
||||||
|
cmd,
|
||||||
logfile=compose.paths.log.log_file(arch, "lookaside_repo_%s" % (variant.uid)),
|
logfile=compose.paths.log.log_file(arch, "lookaside_repo_%s" % (variant.uid)),
|
||||||
show_cmd=True)
|
show_cmd=True,
|
||||||
|
)
|
||||||
|
|
||||||
# Add modular metadata into the repo
|
# Add modular metadata into the repo
|
||||||
if variant.arch_mmds:
|
if variant.arch_mmds:
|
||||||
@ -399,7 +444,7 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
|
|||||||
)
|
)
|
||||||
add_modular_metadata(cr, repo, mod_index, log_file)
|
add_modular_metadata(cr, repo, mod_index, log_file)
|
||||||
|
|
||||||
compose.log_info('[DONE ] %s', msg)
|
compose.log_info("[DONE ] %s", msg)
|
||||||
|
|
||||||
return repo
|
return repo
|
||||||
|
|
||||||
@@ -408,8 +453,8 @@ def _update_config(compose, variant_uid, arch, repo):
     """
     Add the variant lookaside repository into the configuration.
     """
-    lookasides = compose.conf.setdefault('gather_lookaside_repos', [])
-    lookasides.append(('^%s$' % variant_uid, {arch: repo}))
+    lookasides = compose.conf.setdefault("gather_lookaside_repos", [])
+    lookasides.append(("^%s$" % variant_uid, {arch: repo}))
 
 
 def _update_lookaside_config(compose, variant, arch, pkg_map, package_sets=None):
@ -417,13 +462,17 @@ def _update_lookaside_config(compose, variant, arch, pkg_map, package_sets=None)
|
|||||||
Make sure lookaside repo for all variants that the given one depends on
|
Make sure lookaside repo for all variants that the given one depends on
|
||||||
exist, and that configuration is updated to use those repos.
|
exist, and that configuration is updated to use those repos.
|
||||||
"""
|
"""
|
||||||
for dest, lookaside_variant_uid in compose.conf.get('variant_as_lookaside', []):
|
for dest, lookaside_variant_uid in compose.conf.get("variant_as_lookaside", []):
|
||||||
lookaside_variant = compose.all_variants[lookaside_variant_uid]
|
lookaside_variant = compose.all_variants[lookaside_variant_uid]
|
||||||
if dest != variant.uid:
|
if dest != variant.uid:
|
||||||
continue
|
continue
|
||||||
if arch not in lookaside_variant.arches:
|
if arch not in lookaside_variant.arches:
|
||||||
compose.log_warning('[SKIP] Skipping lookaside from %s for %s.%s due to arch mismatch',
|
compose.log_warning(
|
||||||
lookaside_variant.uid, variant.uid, arch)
|
"[SKIP] Skipping lookaside from %s for %s.%s due to arch mismatch",
|
||||||
|
lookaside_variant.uid,
|
||||||
|
variant.uid,
|
||||||
|
arch,
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
repo = _make_lookaside_repo(
|
repo = _make_lookaside_repo(
|
||||||
compose, lookaside_variant, arch, pkg_map, package_sets
|
compose, lookaside_variant, arch, pkg_map, package_sets
|
||||||
@ -431,7 +480,9 @@ def _update_lookaside_config(compose, variant, arch, pkg_map, package_sets=None)
|
|||||||
_update_config(compose, variant.uid, arch, repo)
|
_update_config(compose, variant.uid, arch, repo)
|
||||||
|
|
||||||
|
|
||||||
def _gather_variants(result, compose, variant_type, package_sets, exclude_fulltree=False):
|
def _gather_variants(
|
||||||
|
result, compose, variant_type, package_sets, exclude_fulltree=False
|
||||||
|
):
|
||||||
"""Run gathering on all arches of all variants of given type.
|
"""Run gathering on all arches of all variants of given type.
|
||||||
|
|
||||||
If ``exclude_fulltree`` is set, all source packages from parent variants
|
If ``exclude_fulltree`` is set, all source packages from parent variants
|
||||||
@ -448,7 +499,9 @@ def _gather_variants(result, compose, variant_type, package_sets, exclude_fulltr
|
|||||||
for arch in variant.arches:
|
for arch in variant.arches:
|
||||||
fulltree_excludes = set()
|
fulltree_excludes = set()
|
||||||
if exclude_fulltree:
|
if exclude_fulltree:
|
||||||
for pkg_name, pkg_arch in get_parent_pkgs(arch, variant, result)["srpm"]:
|
for pkg_name, pkg_arch in get_parent_pkgs(arch, variant, result)[
|
||||||
|
"srpm"
|
||||||
|
]:
|
||||||
fulltree_excludes.add(pkg_name)
|
fulltree_excludes.add(pkg_name)
|
||||||
|
|
||||||
# Get lookaside repos for this variant from other variants. Based
|
# Get lookaside repos for this variant from other variants. Based
|
||||||
@ -467,7 +520,7 @@ def _gather_variants(result, compose, variant_type, package_sets, exclude_fulltr
|
|||||||
t = threading.Thread(
|
t = threading.Thread(
|
||||||
target=worker,
|
target=worker,
|
||||||
args=(que, errors, arch, compose, arch, variant, package_sets),
|
args=(que, errors, arch, compose, arch, variant, package_sets),
|
||||||
kwargs={'fulltree_excludes': fulltree_excludes},
|
kwargs={"fulltree_excludes": fulltree_excludes},
|
||||||
)
|
)
|
||||||
threads_list.append(t)
|
threads_list.append(t)
|
||||||
t.start()
|
t.start()
|
||||||
@ -487,7 +540,9 @@ def _gather_variants(result, compose, variant_type, package_sets, exclude_fulltr
|
|||||||
variant.nsvc_to_pkgset = None
|
variant.nsvc_to_pkgset = None
|
||||||
|
|
||||||
|
|
||||||
def _trim_variants(result, compose, variant_type, remove_pkgs=None, move_to_parent=True):
|
def _trim_variants(
|
||||||
|
result, compose, variant_type, remove_pkgs=None, move_to_parent=True
|
||||||
|
):
|
||||||
"""Trim all varians of given type.
|
"""Trim all varians of given type.
|
||||||
|
|
||||||
Returns a map of all packages included in these variants.
|
Returns a map of all packages included in these variants.
|
||||||
@ -498,7 +553,8 @@ def _trim_variants(result, compose, variant_type, remove_pkgs=None, move_to_pare
|
|||||||
pkg_map = result[arch][variant.uid]
|
pkg_map = result[arch][variant.uid]
|
||||||
parent_pkgs = get_parent_pkgs(arch, variant, result)
|
parent_pkgs = get_parent_pkgs(arch, variant, result)
|
||||||
included_packages, move_to_parent_pkgs, removed_pkgs = trim_packages(
|
included_packages, move_to_parent_pkgs, removed_pkgs = trim_packages(
|
||||||
compose, arch, variant, pkg_map, parent_pkgs, remove_pkgs=remove_pkgs)
|
compose, arch, variant, pkg_map, parent_pkgs, remove_pkgs=remove_pkgs
|
||||||
|
)
|
||||||
|
|
||||||
# update all_addon_pkgs
|
# update all_addon_pkgs
|
||||||
for pkg_type, pkgs in included_packages.items():
|
for pkg_type, pkgs in included_packages.items():
|
||||||
@ -509,8 +565,15 @@ def _trim_variants(result, compose, variant_type, remove_pkgs=None, move_to_pare
|
|||||||
parent_pkg_map = result[arch][variant.parent.uid]
|
parent_pkg_map = result[arch][variant.parent.uid]
|
||||||
for pkg_type, pkgs in move_to_parent_pkgs.items():
|
for pkg_type, pkgs in move_to_parent_pkgs.items():
|
||||||
for pkg in pkgs:
|
for pkg in pkgs:
|
||||||
compose.log_debug("Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s"
|
compose.log_debug(
|
||||||
% (arch, variant.uid, pkg_type, os.path.basename(pkg["path"])))
|
"Moving package to parent (arch: %s, variant: %s, pkg_type: %s): %s"
|
||||||
|
% (
|
||||||
|
arch,
|
||||||
|
variant.uid,
|
||||||
|
pkg_type,
|
||||||
|
os.path.basename(pkg["path"]),
|
||||||
|
)
|
||||||
|
)
|
||||||
if pkg not in parent_pkg_map[pkg_type]:
|
if pkg not in parent_pkg_map[pkg_type]:
|
||||||
parent_pkg_map[pkg_type].append(pkg)
|
parent_pkg_map[pkg_type].append(pkg)
|
||||||
return all_included_packages
|
return all_included_packages
|
||||||
@@ -519,20 +582,28 @@
 def gather_wrapper(compose, package_sets, path_prefix):
     result = {}
 
-    _gather_variants(result, compose, 'variant', package_sets)
-    _gather_variants(result, compose, 'addon', package_sets, exclude_fulltree=True)
-    _gather_variants(result, compose, 'layered-product', package_sets, exclude_fulltree=True)
-    _gather_variants(result, compose, 'optional', package_sets)
+    _gather_variants(result, compose, "variant", package_sets)
+    _gather_variants(result, compose, "addon", package_sets, exclude_fulltree=True)
+    _gather_variants(
+        result, compose, "layered-product", package_sets, exclude_fulltree=True
+    )
+    _gather_variants(result, compose, "optional", package_sets)
 
-    all_addon_pkgs = _trim_variants(result, compose, 'addon')
+    all_addon_pkgs = _trim_variants(result, compose, "addon")
     # TODO do we really want to move packages to parent here?
-    all_lp_pkgs = _trim_variants(result, compose, 'layered-product', remove_pkgs=all_addon_pkgs)
+    all_lp_pkgs = _trim_variants(
+        result, compose, "layered-product", remove_pkgs=all_addon_pkgs
+    )
 
     # merge all_addon_pkgs with all_lp_pkgs
     for pkg_type in set(all_addon_pkgs.keys()) | set(all_lp_pkgs.keys()):
-        all_addon_pkgs.setdefault(pkg_type, set()).update(all_lp_pkgs.get(pkg_type, set()))
+        all_addon_pkgs.setdefault(pkg_type, set()).update(
+            all_lp_pkgs.get(pkg_type, set())
+        )
 
-    _trim_variants(result, compose, 'optional', remove_pkgs=all_addon_pkgs, move_to_parent=False)
+    _trim_variants(
+        result, compose, "optional", remove_pkgs=all_addon_pkgs, move_to_parent=False
+    )
 
     # write packages (package lists) for all variants
     for arch in compose.get_arches():
@ -549,17 +620,21 @@ def write_prepopulate_file(compose):
|
|||||||
It is stored in a location where ``get_prepopulate_packages`` function
|
It is stored in a location where ``get_prepopulate_packages`` function
|
||||||
expects.
|
expects.
|
||||||
"""
|
"""
|
||||||
if 'gather_prepopulate' not in compose.conf:
|
if "gather_prepopulate" not in compose.conf:
|
||||||
return
|
return
|
||||||
|
|
||||||
prepopulate_file = os.path.join(compose.paths.work.topdir(arch="global"), "prepopulate.json")
|
prepopulate_file = os.path.join(
|
||||||
|
compose.paths.work.topdir(arch="global"), "prepopulate.json"
|
||||||
|
)
|
||||||
msg = "Writing prepopulate file: %s" % prepopulate_file
|
msg = "Writing prepopulate file: %s" % prepopulate_file
|
||||||
|
|
||||||
scm_dict = compose.conf["gather_prepopulate"]
|
scm_dict = compose.conf["gather_prepopulate"]
|
||||||
if isinstance(scm_dict, dict):
|
if isinstance(scm_dict, dict):
|
||||||
file_name = os.path.basename(scm_dict["file"])
|
file_name = os.path.basename(scm_dict["file"])
|
||||||
if scm_dict["scm"] == "file":
|
if scm_dict["scm"] == "file":
|
||||||
scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"]))
|
scm_dict["file"] = os.path.join(
|
||||||
|
compose.config_dir, os.path.basename(scm_dict["file"])
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
file_name = os.path.basename(scm_dict)
|
file_name = os.path.basename(scm_dict)
|
||||||
scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict))
|
scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict))
|
||||||
@ -581,7 +656,9 @@ def get_prepopulate_packages(compose, arch, variant, include_arch=True):
|
|||||||
"""
|
"""
|
||||||
result = set()
|
result = set()
|
||||||
|
|
||||||
prepopulate_file = os.path.join(compose.paths.work.topdir(arch="global"), "prepopulate.json")
|
prepopulate_file = os.path.join(
|
||||||
|
compose.paths.work.topdir(arch="global"), "prepopulate.json"
|
||||||
|
)
|
||||||
if not os.path.isfile(prepopulate_file):
|
if not os.path.isfile(prepopulate_file):
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@ -597,7 +674,8 @@ def get_prepopulate_packages(compose, arch, variant, include_arch=True):
|
|||||||
if pkg_arch not in get_compatible_arches(arch, multilib=True):
|
if pkg_arch not in get_compatible_arches(arch, multilib=True):
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"Incompatible package arch '%s' for tree arch '%s' in prepopulate package '%s'"
|
"Incompatible package arch '%s' for tree arch '%s' in prepopulate package '%s'"
|
||||||
% (pkg_arch, arch, pkg_name))
|
% (pkg_arch, arch, pkg_name)
|
||||||
|
)
|
||||||
if include_arch:
|
if include_arch:
|
||||||
result.add(i)
|
result.add(i)
|
||||||
else:
|
else:
|
||||||
@ -609,10 +687,13 @@ def get_additional_packages(compose, arch, variant):
|
|||||||
result = set()
|
result = set()
|
||||||
for i in get_arch_variant_data(compose.conf, "additional_packages", arch, variant):
|
for i in get_arch_variant_data(compose.conf, "additional_packages", arch, variant):
|
||||||
pkg_name, pkg_arch = split_name_arch(i)
|
pkg_name, pkg_arch = split_name_arch(i)
|
||||||
if pkg_arch is not None and pkg_arch not in get_compatible_arches(arch, multilib=True):
|
if pkg_arch is not None and pkg_arch not in get_compatible_arches(
|
||||||
|
arch, multilib=True
|
||||||
|
):
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
"Incompatible package arch '%s' for tree arch '%s' in additional package '%s'"
|
"Incompatible package arch '%s' for tree arch '%s' in additional package '%s'"
|
||||||
% (pkg_arch, arch, pkg_name))
|
% (pkg_arch, arch, pkg_name)
|
||||||
|
)
|
||||||
result.add((pkg_name, pkg_arch))
|
result.add((pkg_name, pkg_arch))
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@ -669,23 +750,28 @@ def get_variant_packages(compose, arch, variant, source_name, package_sets=None)
|
|||||||
packages |= get_additional_packages(compose, arch, variant)
|
packages |= get_additional_packages(compose, arch, variant)
|
||||||
filter_packages |= get_filter_packages(compose, arch, variant)
|
filter_packages |= get_filter_packages(compose, arch, variant)
|
||||||
|
|
||||||
if compose.conf['filter_system_release_packages']:
|
if compose.conf["filter_system_release_packages"]:
|
||||||
system_release_packages, system_release_filter_packages = get_system_release_packages(
|
(
|
||||||
compose, arch, variant, package_sets)
|
system_release_packages,
|
||||||
|
system_release_filter_packages,
|
||||||
|
) = get_system_release_packages(compose, arch, variant, package_sets)
|
||||||
packages |= system_release_packages
|
packages |= system_release_packages
|
||||||
filter_packages |= system_release_filter_packages
|
filter_packages |= system_release_filter_packages
|
||||||
|
|
||||||
if variant.type == "optional":
|
if variant.type == "optional":
|
||||||
for var in variant.parent.get_variants(
|
for var in variant.parent.get_variants(
|
||||||
arch=arch, types=["self", "variant", "addon", "layered-product"]):
|
arch=arch, types=["self", "variant", "addon", "layered-product"]
|
||||||
|
):
|
||||||
var_packages, var_groups, _ = get_variant_packages(
|
var_packages, var_groups, _ = get_variant_packages(
|
||||||
compose, arch, var, source_name, package_sets=package_sets)
|
compose, arch, var, source_name, package_sets=package_sets
|
||||||
|
)
|
||||||
packages |= var_packages
|
packages |= var_packages
|
||||||
groups |= var_groups
|
groups |= var_groups
|
||||||
|
|
||||||
if variant.type in ["addon", "layered-product"]:
|
if variant.type in ["addon", "layered-product"]:
|
||||||
var_packages, var_groups, _ = get_variant_packages(
|
var_packages, var_groups, _ = get_variant_packages(
|
||||||
compose, arch, variant.parent, source_name, package_sets=package_sets)
|
compose, arch, variant.parent, source_name, package_sets=package_sets
|
||||||
|
)
|
||||||
packages |= var_packages
|
packages |= var_packages
|
||||||
groups |= var_groups
|
groups |= var_groups
|
||||||
|
|
||||||
@ -714,12 +800,16 @@ def get_system_release_packages(compose, arch, variant, package_sets):
|
|||||||
# search for best match
|
# search for best match
|
||||||
best_match = None
|
best_match = None
|
||||||
for pkg in system_release_packages:
|
for pkg in system_release_packages:
|
||||||
if pkg.name.endswith("release-%s" % variant.uid.lower()) or pkg.name.startswith("%s-release" % variant.uid.lower()):
|
if pkg.name.endswith(
|
||||||
|
"release-%s" % variant.uid.lower()
|
||||||
|
) or pkg.name.startswith("%s-release" % variant.uid.lower()):
|
||||||
best_match = pkg
|
best_match = pkg
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
# addons: return release packages from parent variant
|
# addons: return release packages from parent variant
|
||||||
return get_system_release_packages(compose, arch, variant.parent, package_sets)
|
return get_system_release_packages(
|
||||||
|
compose, arch, variant.parent, package_sets
|
||||||
|
)
|
||||||
|
|
||||||
if not best_match:
|
if not best_match:
|
||||||
# no package matches variant name -> pick the first one
|
# no package matches variant name -> pick the first one
|
||||||
@ -734,8 +824,9 @@ def get_system_release_packages(compose, arch, variant, package_sets):
|
|||||||
return packages, filter_packages
|
return packages, filter_packages
|
||||||
|
|
||||||
|
|
||||||
def get_packages_to_gather(compose, arch=None, variant=None, include_arch=True,
|
def get_packages_to_gather(
|
||||||
include_prepopulated=False):
|
compose, arch=None, variant=None, include_arch=True, include_prepopulated=False
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
Returns the list of names of packages and list of names of groups which
|
Returns the list of names of packages and list of names of groups which
|
||||||
would be included in a compose as GATHER phase result.
|
would be included in a compose as GATHER phase result.
|
||||||
@ -771,7 +862,8 @@ def get_packages_to_gather(compose, arch=None, variant=None, include_arch=True,
|
|||||||
|
|
||||||
if include_prepopulated:
|
if include_prepopulated:
|
||||||
prepopulated = get_prepopulate_packages(
|
prepopulated = get_prepopulate_packages(
|
||||||
compose, arch, variant, include_arch)
|
compose, arch, variant, include_arch
|
||||||
|
)
|
||||||
packages = packages.union(prepopulated)
|
packages = packages.union(prepopulated)
|
||||||
|
|
||||||
return list(packages), list(groups)
|
return list(packages), list(groups)
|
||||||
|
@@ -25,6 +25,7 @@ from pungi.linker import LinkerPool
 # DONE: show overall progress, not each file
 # TODO: (these should be logged separately)
 
+
 def _get_src_nevra(compose, pkg_obj, srpm_map):
     """Return source N-E:V-R.A.rpm; guess if necessary."""
     result = srpm_map.get(pkg_obj.sourcerpm, None)
@@ -32,7 +33,10 @@ def _get_src_nevra(compose, pkg_obj, srpm_map):
         nvra = kobo.rpmlib.parse_nvra(pkg_obj.sourcerpm)
         nvra["epoch"] = pkg_obj.epoch
         result = kobo.rpmlib.make_nvra(nvra, add_rpm=True, force_epoch=True)
-        compose.log_warning("Package %s has no SRPM available, guessing epoch: %s" % (pkg_obj.nevra, result))
+        compose.log_warning(
+            "Package %s has no SRPM available, guessing epoch: %s"
+            % (pkg_obj.nevra, result)
+        )
     return result
 
 
@ -77,8 +81,14 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
|
|||||||
for pkg in pkg_map["srpm"]:
|
for pkg in pkg_map["srpm"]:
|
||||||
if "lookaside" in pkg["flags"]:
|
if "lookaside" in pkg["flags"]:
|
||||||
continue
|
continue
|
||||||
dst = os.path.join(packages_dir, get_package_path(os.path.basename(pkg["path"]), hashed_directories))
|
dst = os.path.join(
|
||||||
dst_relpath = os.path.join(packages_dir_relpath, get_package_path(os.path.basename(pkg["path"]), hashed_directories))
|
packages_dir,
|
||||||
|
get_package_path(os.path.basename(pkg["path"]), hashed_directories),
|
||||||
|
)
|
||||||
|
dst_relpath = os.path.join(
|
||||||
|
packages_dir_relpath,
|
||||||
|
get_package_path(os.path.basename(pkg["path"]), hashed_directories),
|
||||||
|
)
|
||||||
|
|
||||||
# link file
|
# link file
|
||||||
pool.queue_put((pkg["path"], dst))
|
pool.queue_put((pkg["path"], dst))
|
||||||
@ -86,7 +96,14 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
|
|||||||
# update rpm manifest
|
# update rpm manifest
|
||||||
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
|
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
|
||||||
nevra = pkg_obj.nevra
|
nevra = pkg_obj.nevra
|
||||||
manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="source")
|
manifest.add(
|
||||||
|
variant.uid,
|
||||||
|
arch,
|
||||||
|
nevra,
|
||||||
|
path=dst_relpath,
|
||||||
|
sigkey=pkg_obj.signature,
|
||||||
|
category="source",
|
||||||
|
)
|
||||||
|
|
||||||
# update srpm_map
|
# update srpm_map
|
||||||
srpm_map.setdefault(pkg_obj.file_name, nevra)
|
srpm_map.setdefault(pkg_obj.file_name, nevra)
|
||||||
@ -96,8 +113,14 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
|
|||||||
for pkg in pkg_map["rpm"]:
|
for pkg in pkg_map["rpm"]:
|
||||||
if "lookaside" in pkg["flags"]:
|
if "lookaside" in pkg["flags"]:
|
||||||
continue
|
continue
|
||||||
dst = os.path.join(packages_dir, get_package_path(os.path.basename(pkg["path"]), hashed_directories))
|
dst = os.path.join(
|
||||||
dst_relpath = os.path.join(packages_dir_relpath, get_package_path(os.path.basename(pkg["path"]), hashed_directories))
|
packages_dir,
|
||||||
|
get_package_path(os.path.basename(pkg["path"]), hashed_directories),
|
||||||
|
)
|
||||||
|
dst_relpath = os.path.join(
|
||||||
|
packages_dir_relpath,
|
||||||
|
get_package_path(os.path.basename(pkg["path"]), hashed_directories),
|
||||||
|
)
|
||||||
|
|
||||||
# link file
|
# link file
|
||||||
pool.queue_put((pkg["path"], dst))
|
pool.queue_put((pkg["path"], dst))
|
||||||
@ -106,15 +129,31 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
|
|||||||
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
|
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
|
||||||
nevra = pkg_obj.nevra
|
nevra = pkg_obj.nevra
|
||||||
src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
|
src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
|
||||||
manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="binary", srpm_nevra=src_nevra)
|
manifest.add(
|
||||||
|
variant.uid,
|
||||||
|
arch,
|
||||||
|
nevra,
|
||||||
|
path=dst_relpath,
|
||||||
|
sigkey=pkg_obj.signature,
|
||||||
|
category="binary",
|
||||||
|
srpm_nevra=src_nevra,
|
||||||
|
)
|
||||||
|
|
||||||
packages_dir = compose.paths.compose.debug_packages(arch, variant)
|
packages_dir = compose.paths.compose.debug_packages(arch, variant)
|
||||||
packages_dir_relpath = compose.paths.compose.debug_packages(arch, variant, relative=True)
|
packages_dir_relpath = compose.paths.compose.debug_packages(
|
||||||
|
arch, variant, relative=True
|
||||||
|
)
|
||||||
for pkg in pkg_map["debuginfo"]:
|
for pkg in pkg_map["debuginfo"]:
|
||||||
if "lookaside" in pkg["flags"]:
|
if "lookaside" in pkg["flags"]:
|
||||||
continue
|
continue
|
||||||
dst = os.path.join(packages_dir, get_package_path(os.path.basename(pkg["path"]), hashed_directories))
|
dst = os.path.join(
|
||||||
dst_relpath = os.path.join(packages_dir_relpath, get_package_path(os.path.basename(pkg["path"]), hashed_directories))
|
packages_dir,
|
||||||
|
get_package_path(os.path.basename(pkg["path"]), hashed_directories),
|
||||||
|
)
|
||||||
|
dst_relpath = os.path.join(
|
||||||
|
packages_dir_relpath,
|
||||||
|
get_package_path(os.path.basename(pkg["path"]), hashed_directories),
|
||||||
|
)
|
||||||
|
|
||||||
# link file
|
# link file
|
||||||
pool.queue_put((pkg["path"], dst))
|
pool.queue_put((pkg["path"], dst))
|
||||||
@ -123,7 +162,15 @@ def link_files(compose, arch, variant, pkg_map, pkg_sets, manifest, srpm_map={})
|
|||||||
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
|
pkg_obj = _find_by_path(pkg_sets, arch, pkg["path"])
|
||||||
nevra = pkg_obj.nevra
|
nevra = pkg_obj.nevra
|
||||||
src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
|
src_nevra = _get_src_nevra(compose, pkg_obj, srpm_map)
|
||||||
manifest.add(variant.uid, arch, nevra, path=dst_relpath, sigkey=pkg_obj.signature, category="debug", srpm_nevra=src_nevra)
|
manifest.add(
|
||||||
|
variant.uid,
|
||||||
|
arch,
|
||||||
|
nevra,
|
||||||
|
path=dst_relpath,
|
||||||
|
sigkey=pkg_obj.signature,
|
||||||
|
category="debug",
|
||||||
|
srpm_nevra=src_nevra,
|
||||||
|
)
|
||||||
|
|
||||||
pool.start()
|
pool.start()
|
||||||
pool.stop()
|
pool.stop()
|
||||||
|
@ -18,7 +18,6 @@ import kobo.plugins
|
|||||||
|
|
||||||
|
|
||||||
class GatherMethodBase(kobo.plugins.Plugin):
|
class GatherMethodBase(kobo.plugins.Plugin):
|
||||||
|
|
||||||
def __init__(self, compose):
|
def __init__(self, compose):
|
||||||
self.compose = compose
|
self.compose = compose
|
||||||
|
|
||||||
|
@@ -32,18 +32,43 @@ import pungi.phases.gather.method
class GatherMethodDeps(pungi.phases.gather.method.GatherMethodBase):
    enabled = True

    def __call__(
        self,
        arch,
        variant,
        packages,
        groups,
        filter_packages,
        multilib_whitelist,
        multilib_blacklist,
        package_sets,
        path_prefix=None,
        fulltree_excludes=None,
        prepopulate=None,
    ):
        # result = {
        #     "rpm": [],
        #     "srpm": [],
        #     "debuginfo": [],
        # }

        write_pungi_config(
            self.compose,
            arch,
            variant,
            packages,
            groups,
            filter_packages,
            multilib_whitelist,
            multilib_blacklist,
            fulltree_excludes=fulltree_excludes,
            prepopulate=prepopulate,
            source_name=self.source_name,
            package_sets=package_sets,
        )
        result, missing_deps = resolve_deps(
            self.compose, arch, variant, source_name=self.source_name
        )
        raise_on_invalid_sigkeys(arch, variant, package_sets, result)
        check_deps(self.compose, arch, variant, missing_deps)
        return result
@@ -83,12 +108,25 @@ def _format_packages(pkgs)
    return sorted(result)


def write_pungi_config(
    compose,
    arch,
    variant,
    packages,
    groups,
    filter_packages,
    multilib_whitelist,
    multilib_blacklist,
    fulltree_excludes=None,
    prepopulate=None,
    source_name=None,
    package_sets=None,
):
    """write pungi config (kickstart) for arch/variant"""
    pungi_wrapper = PungiWrapper()
    pungi_cfg = compose.paths.work.pungi_conf(
        variant=variant, arch=arch, source_name=source_name
    )

    compose.log_info(
        "Writing pungi config (arch: %s, variant: %s): %s", arch, variant, pungi_cfg
@@ -102,13 +140,20 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages
    repos["comps-repo"] = compose.paths.work.comps_repo(arch=arch, variant=variant)
    if variant.type == "optional":
        for var in variant.parent.get_variants(
            arch=arch, types=["self", "variant", "addon", "layered-product"]
        ):
            repos["%s-comps" % var.uid] = compose.paths.work.comps_repo(
                arch=arch, variant=var
            )
    if variant.type in ["addon", "layered-product"]:
        repos["parent-comps"] = compose.paths.work.comps_repo(
            arch=arch, variant=variant.parent
        )

    lookaside_repos = {}
    for i, repo_url in enumerate(
        pungi.phases.gather.get_lookaside_repos(compose, arch, variant)
    ):
        lookaside_repos["lookaside-repo-%s" % i] = repo_url

    packages_str = list(_format_packages(packages))
@@ -116,15 +161,22 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages

    if not groups and not packages_str and not prepopulate:
        raise RuntimeError(
            "No packages included in %s.%s (no comps groups, no input packages, no prepopulate)"
            % (variant.uid, arch)
        )

    pungi_wrapper.write_kickstart(
        ks_path=pungi_cfg,
        repos=repos,
        groups=groups,
        packages=packages_str,
        exclude_packages=filter_packages_str,
        lookaside_repos=lookaside_repos,
        fulltree_excludes=fulltree_excludes,
        multilib_whitelist=multilib_whitelist,
        multilib_blacklist=multilib_blacklist,
        prepopulate=prepopulate,
    )


def resolve_deps(compose, arch, variant, source_name=None):
@@ -136,7 +188,7 @@ def resolve_deps(compose, arch, variant, source_name=None)
    compose.log_info("[BEGIN] %s" % msg)
    pungi_conf = compose.paths.work.pungi_conf(arch, variant, source_name=source_name)

    multilib_methods = get_arch_variant_data(compose.conf, "multilib", arch, variant)

    greedy_method = compose.conf["greedy_method"]

@@ -159,7 +211,9 @@ def resolve_deps(compose, arch, variant, source_name=None)
    selfhosting = False

    lookaside_repos = {}
    for i, repo_url in enumerate(
        pungi.phases.gather.get_lookaside_repos(compose, arch, variant)
    ):
        lookaside_repos["lookaside-repo-%s" % i] = repo_url

    yum_arch = tree_arch_to_yum_arch(arch)
@@ -167,28 +221,40 @@ def resolve_deps(compose, arch, variant, source_name=None)
    cache_dir = compose.paths.work.pungi_cache_dir(arch, variant)
    # TODO: remove YUM code, fully migrate to DNF
    backends = {
        "yum": pungi_wrapper.get_pungi_cmd,
        "dnf": pungi_wrapper.get_pungi_cmd_dnf,
    }
    get_cmd = backends[compose.conf["gather_backend"]]
    cmd = get_cmd(
        pungi_conf,
        destdir=tmp_dir,
        name=variant.uid,
        selfhosting=selfhosting,
        fulltree=fulltree,
        arch=yum_arch,
        full_archlist=True,
        greedy=greedy_method,
        cache_dir=cache_dir,
        lookaside_repos=lookaside_repos,
        multilib_methods=multilib_methods,
        profiler=profiler,
    )
    # Use temp working directory directory as workaround for
    # https://bugzilla.redhat.com/show_bug.cgi?id=795137
    with temp_dir(prefix="pungi_") as tmp_dir:
        run(cmd, logfile=pungi_log, show_cmd=True, workdir=tmp_dir, env=os.environ)

    with open(pungi_log, "r") as f:
        packages, broken_deps, missing_comps_pkgs = pungi_wrapper.parse_log(f)

    if missing_comps_pkgs:
        log_msg = "Packages mentioned in comps do not exist for %s.%s: %s" % (
            variant.uid,
            arch,
            ", ".join(sorted(missing_comps_pkgs)),
        )
        compose.log_warning(log_msg)
        if compose.conf["require_all_comps_packages"]:
            raise RuntimeError(log_msg)

    compose.log_info("[DONE ] %s" % msg)
@@ -202,5 +268,7 @@ def check_deps(compose, arch, variant, missing_deps)
    if missing_deps:
        for pkg in sorted(missing_deps):
            compose.log_error(
                "Unresolved dependencies for %s.%s in package %s: %s"
                % (variant, arch, pkg, sorted(missing_deps[pkg]))
            )
        raise RuntimeError("Unresolved dependencies detected")
@@ -268,10 +268,14 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
        env = os.environ.copy()
        env["G_MESSAGES_PREFIXED"] = ""
        env["XDG_CACHE_HOME"] = cache_dir
        self.compose.log_debug(
            "[BEGIN] Running fus (arch: %s, variant: %s)" % (arch, variant)
        )
        run(cmd, logfile=logfile, show_cmd=True, env=env)
        output, out_modules = fus.parse_output(logfile)
        self.compose.log_debug(
            "[DONE ] Running fus (arch: %s, variant: %s)" % (arch, variant)
        )
        # No need to resolve modules again. They are not going to change.
        modules = []
        # Reset input packages as well to only solve newly added things.
@@ -397,7 +401,11 @@ class GatherMethodHybrid(pungi.phases.gather.method.GatherMethodBase):
                continue

            strict_nevra = "%s-%s:%s-%s.%s" % (
                pkg.name,
                pkg.epoch or "0",
                pkg.version,
                pkg.release,
                pkg.arch,
            )
            if strict_nevra in self.modular_packages:
                # Wildcards should not match modular packages.
@@ -30,16 +30,28 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
    enabled = True

    def __call__(self, arch, variant, *args, **kwargs):
        fname = "gather-nodeps-%s" % variant.uid
        if self.source_name:
            fname += "-" + self.source_name
        log_file = self.compose.paths.log.log_file(arch, fname)
        with open(log_file, "w") as log:
            return self.worker(log, arch, variant, *args, **kwargs)

    def worker(
        self,
        log,
        arch,
        variant,
        pkgs,
        groups,
        filter_packages,
        multilib_whitelist,
        multilib_blacklist,
        package_sets,
        path_prefix=None,
        fulltree_excludes=None,
        prepopulate=None,
    ):
        result = {
            "rpm": [],
            "srpm": [],
@@ -48,7 +60,7 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):

        group_packages = expand_groups(self.compose, arch, variant, groups)
        packages = pkgs | group_packages
        log.write("Requested packages:\n%s\n" % pformat(packages))

        seen_rpms = {}
        seen_srpms = {}
@@ -58,59 +70,65 @@ class GatherMethodNodeps(pungi.phases.gather.method.GatherMethodBase):
        for i in valid_arches:
            compatible_arches[i] = pungi.arch.get_compatible_arches(i)

        log.write("\nGathering rpms\n")
        for pkg in iterate_packages(package_sets, arch):
            if not pkg_is_rpm(pkg):
                continue
            for gathered_pkg, pkg_arch in packages:
                if isinstance(gathered_pkg, six.string_types) and not fnmatch(
                    pkg.name, gathered_pkg
                ):
                    continue
                elif (
                    type(gathered_pkg) in [SimpleRpmWrapper, RpmWrapper]
                    and pkg.nevra != gathered_pkg.nevra
                ):
                    continue
                if (
                    pkg_arch is not None
                    and pkg.arch != pkg_arch
                    and pkg.arch != "noarch"
                ):
                    continue
                result["rpm"].append(
                    {"path": pkg.file_path, "flags": ["input"]}
                )
                seen_rpms.setdefault(pkg.name, set()).add(pkg.arch)
                seen_srpms.setdefault(pkg.sourcerpm, set()).add(pkg.arch)
                log.write(
                    "Added %s (matched %s.%s) (sourcerpm: %s)\n"
                    % (pkg, gathered_pkg, pkg_arch, pkg.sourcerpm)
                )

        log.write("\nGathering source rpms\n")
        for pkg in iterate_packages(package_sets, arch):
            if not pkg_is_srpm(pkg):
                continue
            if pkg.file_name in seen_srpms:
                result["srpm"].append(
                    {"path": pkg.file_path, "flags": ["input"]}
                )
                log.write("Adding %s\n" % pkg)

        log.write("\nGathering debuginfo packages\n")
        for pkg in iterate_packages(package_sets, arch):
            if not pkg_is_debug(pkg):
                continue
            if pkg.sourcerpm not in seen_srpms:
                log.write("Not considering %s: corresponding srpm not included\n" % pkg)
                continue
            pkg_arches = set(compatible_arches[pkg.arch]) - set(["noarch"])
            seen_arches = set(seen_srpms[pkg.sourcerpm]) - set(["noarch"])
            if not (pkg_arches & seen_arches):
                # We only want to pull in a debuginfo if we have a binary
                # package for a compatible arch. Noarch packages should not
                # pull debuginfo (they would pull in all architectures).
                log.write("Not including %s: no package for this arch\n" % pkg)
                continue
            result["debuginfo"].append(
                {"path": pkg.file_path, "flags": ["input"]}
            )
            log.write("Adding %s\n" % pkg)

        return result

@@ -130,10 +148,12 @@ def expand_groups(compose, arch, variant, groups, set_pkg_arch=True)
    comps.append(CompsWrapper(comps_file))

    if variant and variant.parent:
        parent_comps_file = compose.paths.work.comps(
            arch, variant.parent, create_dir=False
        )
        comps.append(CompsWrapper(parent_comps_file))

        if variant.type == "optional":
            for v in variant.parent.variants.values():
                if v.id == variant.id:
                    continue
@@ -18,7 +18,6 @@ import kobo.plugins


class GatherSourceBase(kobo.plugins.Plugin):
    def __init__(self, compose):
        self.compose = compose

@@ -34,7 +34,7 @@ class GatherSourceComps(pungi.phases.gather.source.GatherSourceBase):

    def __call__(self, arch, variant):
        groups = set()
        if not self.compose.conf.get("comps_file"):
            return set(), set()

        comps = CompsWrapper(self.compose.paths.work.comps(arch=arch, variant=variant))
|
@ -20,27 +20,30 @@ from productmd.images import Image
|
|||||||
# name will be ending with. The extensions are used to filter out which task
|
# name will be ending with. The extensions are used to filter out which task
|
||||||
# results will be pulled into the compose.
|
# results will be pulled into the compose.
|
||||||
EXTENSIONS = {
|
EXTENSIONS = {
|
||||||
'docker': ['tar.gz', 'tar.xz'],
|
"docker": ["tar.gz", "tar.xz"],
|
||||||
'liveimg-squashfs': ['liveimg.squashfs'],
|
"liveimg-squashfs": ["liveimg.squashfs"],
|
||||||
'qcow': ['qcow'],
|
"qcow": ["qcow"],
|
||||||
'qcow2': ['qcow2'],
|
"qcow2": ["qcow2"],
|
||||||
'raw': ['raw'],
|
"raw": ["raw"],
|
||||||
'raw-xz': ['raw.xz'],
|
"raw-xz": ["raw.xz"],
|
||||||
'rhevm-ova': ['rhevm.ova'],
|
"rhevm-ova": ["rhevm.ova"],
|
||||||
'tar-gz': ['tar.gz'],
|
"tar-gz": ["tar.gz"],
|
||||||
'vagrant-hyperv': ['vagrant-hyperv.box'],
|
"vagrant-hyperv": ["vagrant-hyperv.box"],
|
||||||
'vagrant-libvirt': ['vagrant-libvirt.box'],
|
"vagrant-libvirt": ["vagrant-libvirt.box"],
|
||||||
'vagrant-virtualbox': ['vagrant-virtualbox.box'],
|
"vagrant-virtualbox": ["vagrant-virtualbox.box"],
|
||||||
'vagrant-vmware-fusion': ['vagrant-vmware-fusion.box'],
|
"vagrant-vmware-fusion": ["vagrant-vmware-fusion.box"],
|
||||||
'vdi': ['vdi'],
|
"vdi": ["vdi"],
|
||||||
'vmdk': ['vmdk'],
|
"vmdk": ["vmdk"],
|
||||||
'vpc': ['vhd'],
|
"vpc": ["vhd"],
|
||||||
'vsphere-ova': ['vsphere.ova'],
|
"vsphere-ova": ["vsphere.ova"],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
class ImageBuildPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase):
|
class ImageBuildPhase(
|
||||||
|
base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
|
||||||
|
):
|
||||||
"""class for wrapping up koji image-build"""
|
"""class for wrapping up koji image-build"""
|
||||||
|
|
||||||
name = "image_build"
|
name = "image_build"
|
||||||
|
|
||||||
def __init__(self, compose):
|
def __init__(self, compose):
|
||||||
@ -53,13 +56,13 @@ class ImageBuildPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
|
|||||||
current variant. If the config is set, it will be removed from the
|
current variant. If the config is set, it will be removed from the
|
||||||
dict.
|
dict.
|
||||||
"""
|
"""
|
||||||
if variant.type != 'variant':
|
if variant.type != "variant":
|
||||||
# Buildinstall only runs for top-level variants. Nested variants
|
# Buildinstall only runs for top-level variants. Nested variants
|
||||||
# need to re-use install tree from parent.
|
# need to re-use install tree from parent.
|
||||||
variant = variant.parent
|
variant = variant.parent
|
||||||
|
|
||||||
install_tree_from = image_conf.pop('install_tree_from', variant.uid)
|
install_tree_from = image_conf.pop("install_tree_from", variant.uid)
|
||||||
if '://' in install_tree_from:
|
if "://" in install_tree_from:
|
||||||
# It's a URL, return it unchanged
|
# It's a URL, return it unchanged
|
||||||
return install_tree_from
|
return install_tree_from
|
||||||
if install_tree_from.startswith("/"):
|
if install_tree_from.startswith("/"):
|
||||||
@ -69,11 +72,14 @@ class ImageBuildPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
|
|||||||
install_tree_source = self.compose.all_variants.get(install_tree_from)
|
install_tree_source = self.compose.all_variants.get(install_tree_from)
|
||||||
if not install_tree_source:
|
if not install_tree_source:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
'There is no variant %s to get install tree from when building image for %s.'
|
"There is no variant %s to get install tree from when building image for %s."
|
||||||
% (install_tree_from, variant.uid))
|
% (install_tree_from, variant.uid)
|
||||||
|
)
|
||||||
return translate_path(
|
return translate_path(
|
||||||
self.compose,
|
self.compose,
|
||||||
self.compose.paths.compose.os_tree('$arch', install_tree_source, create_dir=False)
|
self.compose.paths.compose.os_tree(
|
||||||
|
"$arch", install_tree_source, create_dir=False
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
def _get_repo(self, image_conf, variant):
|
def _get_repo(self, image_conf, variant):
|
||||||
@ -82,27 +88,29 @@ class ImageBuildPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
|
|||||||
explicitly listed in config, followed by by repo for current variant
|
explicitly listed in config, followed by by repo for current variant
|
||||||
if it's not included in the list already.
|
if it's not included in the list already.
|
||||||
"""
|
"""
|
||||||
repos = shortcuts.force_list(image_conf.get('repo', []))
|
repos = shortcuts.force_list(image_conf.get("repo", []))
|
||||||
|
|
||||||
if not variant.is_empty and variant.uid not in repos:
|
if not variant.is_empty and variant.uid not in repos:
|
||||||
repos.append(variant.uid)
|
repos.append(variant.uid)
|
||||||
|
|
||||||
return ",".join(get_repo_urls(self.compose, repos, arch='$arch'))
|
return ",".join(get_repo_urls(self.compose, repos, arch="$arch"))
|
||||||
|
|
||||||
def _get_arches(self, image_conf, arches):
|
def _get_arches(self, image_conf, arches):
|
||||||
if 'arches' in image_conf['image-build']:
|
if "arches" in image_conf["image-build"]:
|
||||||
arches = set(image_conf['image-build'].get('arches', [])) & arches
|
arches = set(image_conf["image-build"].get("arches", [])) & arches
|
||||||
return sorted(arches)
|
return sorted(arches)
|
||||||
|
|
||||||
def _set_release(self, image_conf):
|
def _set_release(self, image_conf):
|
||||||
"""If release is set explicitly to None, replace it with date and respin."""
|
"""If release is set explicitly to None, replace it with date and respin."""
|
||||||
if 'release' in image_conf:
|
if "release" in image_conf:
|
||||||
image_conf['release'] = (version_generator(self.compose, image_conf['release']) or
|
image_conf["release"] = (
|
||||||
self.compose.image_release)
|
version_generator(self.compose, image_conf["release"])
|
||||||
|
or self.compose.image_release
|
||||||
|
)
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
for variant in self.compose.get_variants():
|
for variant in self.compose.get_variants():
|
||||||
arches = set([x for x in variant.arches if x != 'src'])
|
arches = set([x for x in variant.arches if x != "src"])
|
||||||
|
|
||||||
for image_conf in self.get_config_block(variant):
|
for image_conf in self.get_config_block(variant):
|
||||||
# We will modify the data, so we need to make a copy to
|
# We will modify the data, so we need to make a copy to
|
||||||
@@ -112,54 +120,66 @@ class ImageBuildPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG

                # image_conf is passed to get_image_build_cmd as dict

                image_conf["image-build"]["arches"] = self._get_arches(
                    image_conf, arches
                )
                if not image_conf["image-build"]["arches"]:
                    continue

                # Replace possible ambiguous ref name with explicit hash.
                ksurl = self.get_ksurl(image_conf["image-build"])
                if ksurl:
                    image_conf["image-build"]["ksurl"] = ksurl

                image_conf["image-build"]["variant"] = variant

                image_conf["image-build"]["install_tree"] = self._get_install_tree(
                    image_conf["image-build"], variant
                )

                release = self.get_release(image_conf["image-build"])
                if release:
                    image_conf["image-build"]["release"] = release

                image_conf["image-build"]["version"] = self.get_version(
                    image_conf["image-build"]
                )
                image_conf["image-build"]["target"] = self.get_config(
                    image_conf["image-build"], "target"
                )

                # Pungi config can either contain old [(format, suffix)], or
                # just list of formats, or a single format.
                formats = []
                for format in force_list(image_conf["image-build"]["format"]):
                    formats.append(
                        format[0] if isinstance(format, (tuple, list)) else format
                    )
                image_conf["image-build"]["format"] = formats
                image_conf["image-build"]["repo"] = self._get_repo(
                    image_conf["image-build"], variant
                )

                can_fail = image_conf["image-build"].pop("failable", [])
                if can_fail == ["*"]:
                    can_fail = image_conf["image-build"]["arches"]
                if can_fail:
                    image_conf["image-build"]["can_fail"] = sorted(can_fail)

                cmd = {
                    "image_conf": image_conf,
                    "conf_file": self.compose.paths.work.image_build_conf(
                        image_conf["image-build"]["variant"],
                        image_name=image_conf["image-build"]["name"],
                        image_type="-".join(formats),
                        arches=image_conf["image-build"]["arches"],
                    ),
                    "image_dir": self.compose.paths.compose.image_dir(variant),
                    "relative_image_dir": self.compose.paths.compose.image_dir(
                        variant, relative=True
                    ),
                    "link_type": self.compose.conf["link_type"],
                    "scratch": image_conf["image-build"].pop("scratch", False),
                }
                self.pool.add(CreateImageBuildThread(self.pool))
                self.pool.queue_put((self.compose, cmd))
@@ -175,33 +195,45 @@ class CreateImageBuildThread(WorkerThread)
        compose, cmd = item
        variant = cmd["image_conf"]["image-build"]["variant"]
        subvariant = cmd["image_conf"]["image-build"].get("subvariant", variant.uid)
        self.failable_arches = cmd["image_conf"]["image-build"].get("can_fail", "")
        self.can_fail = (
            self.failable_arches == cmd["image_conf"]["image-build"]["arches"]
        )
        with failable(
            compose,
            self.can_fail,
            variant,
            "*",
            "image-build",
            subvariant,
            logger=self.pool._logger,
        ):
            self.worker(num, compose, variant, subvariant, cmd)

    def worker(self, num, compose, variant, subvariant, cmd):
        arches = cmd["image_conf"]["image-build"]["arches"]
        formats = "-".join(cmd["image_conf"]["image-build"]["format"])
        dash_arches = "-".join(arches)
        log_file = compose.paths.log.log_file(
            dash_arches, "imagebuild-%s-%s-%s" % (variant.uid, subvariant, formats)
        )
        msg = (
            "Creating image (formats: %s, arches: %s, variant: %s, subvariant: %s)"
            % (formats, dash_arches, variant, subvariant)
        )
        self.pool.log_info("[BEGIN] %s" % msg)

        koji_wrapper = KojiWrapper(compose.conf["koji_profile"])

        # writes conf file for koji image-build
        self.pool.log_info(
            "Writing image-build config for %s.%s into %s"
            % (variant, dash_arches, cmd["conf_file"])
        )

        koji_cmd = koji_wrapper.get_image_build_cmd(
            cmd["image_conf"], conf_file_dest=cmd["conf_file"], scratch=cmd["scratch"]
        )

        # avoid race conditions?
        # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
@@ -210,26 +242,34 @@ class CreateImageBuildThread(WorkerThread)
        self.pool.log_debug("build-image outputs: %s" % (output))
        if output["retcode"] != 0:
            self.fail(compose, cmd)
            raise RuntimeError(
                "ImageBuild task failed: %s. See %s for more details."
                % (output["task_id"], log_file)
            )

        # copy image to images/
        image_infos = []

        paths = koji_wrapper.get_image_paths(
            output["task_id"],
            callback=lambda arch: log_failed_task(
                compose, variant, arch, "image-build", subvariant
            ),
        )

        for arch, paths in paths.items():
            for path in paths:
                for format in cmd["image_conf"]["image-build"]["format"]:
                    for suffix in EXTENSIONS[format]:
                        if path.endswith(suffix):
                            image_infos.append(
                                {
                                    "path": path,
                                    "suffix": suffix,
                                    "type": format,
                                    "arch": arch,
                                }
                            )
                            break

        # The usecase here is that you can run koji image-build with multiple --format
@@ -237,30 +277,32 @@ class CreateImageBuildThread(WorkerThread)
        # image_build record
        linker = Linker(logger=self.pool._logger)
        for image_info in image_infos:
            image_dir = cmd["image_dir"] % {"arch": image_info["arch"]}
            makedirs(image_dir)
            relative_image_dir = cmd["relative_image_dir"] % {
                "arch": image_info["arch"]
            }

            # let's not change filename of koji outputs
            image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

            src_file = os.path.realpath(image_info["path"])
            linker.link(src_file, image_dest, link_type=cmd["link_type"])

            # Update image manifest
            img = Image(compose.im)
            img.type = image_info["type"]
            img.format = image_info["suffix"]
            img.path = os.path.join(relative_image_dir, os.path.basename(image_dest))
            img.mtime = get_mtime(image_dest)
            img.size = get_file_size(image_dest)
            img.arch = image_info["arch"]
            img.disc_number = 1  # We don't expect multiple disks
            img.disc_count = 1
            img.bootable = False
            img.subvariant = subvariant
            setattr(img, "can_fail", self.can_fail)
            setattr(img, "deliverable", "image-build")
            compose.im.add(variant=variant.uid, arch=image_info["arch"], image=img)

        self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
@@ -19,12 +19,12 @@ class ImageChecksumPhase(PhaseBase)
    checksums. The manifest will be updated with the checksums.
    """

    name = "image_checksum"

    def __init__(self, compose):
        super(ImageChecksumPhase, self).__init__(compose)
        self.checksums = self.compose.conf["media_checksums"]
        self.one_file = self.compose.conf["media_checksum_one_file"]

    def skip(self):
        # Skipping this phase does not make sense:
@@ -40,7 +40,7 @@ class ImageChecksumPhase(PhaseBase)
            errors.append(MULTIPLE_CHECKSUMS_ERROR)

        if errors:
            raise ValueError("\n".join(errors))

    def _get_images(self):
        """Returns a mapping from directories to sets of ``Image``s.
@@ -57,20 +57,37 @@ class ImageChecksumPhase(PhaseBase)
        return images

    def _get_base_filename(self, variant, arch, **kwargs):
        base_checksum_name = self.compose.conf["media_checksum_base_filename"]
        if base_checksum_name:
            substs = get_format_substs(
                self.compose, variant=variant, arch=arch, **kwargs
            )
            base_checksum_name = (base_checksum_name % substs).format(**substs)
            base_checksum_name += "-"
        return base_checksum_name

    def run(self):
        topdir = self.compose.paths.compose.topdir()
        make_checksums(
            topdir,
            self.compose.im,
            self.checksums,
            self.one_file,
            self._get_base_filename,
        )


def _compute_checksums(
    results,
    cache,
    variant,
    arch,
    path,
    images,
    checksum_types,
    base_checksum_name_gen,
    one_file,
):
    for image in images:
        filename = os.path.basename(image.path)
        full_path = os.path.join(path, filename)
@@ -83,23 +100,29 @@ def _compute_checksums(results, cache, variant, arch, path, images,
            # Source ISO is listed under each binary architecture. There's no
            # point in checksumming it twice, so we can just remember the
            # digest from first run..
            cache[full_path] = shortcuts.compute_file_checksums(
                full_path, checksum_types
            )
        digests = cache[full_path]
        for checksum, digest in digests.items():
            # Update metadata with the checksum
            image.add_checksum(None, checksum, digest)
            # If not turned of, create the file-specific checksum file
            if not one_file:
                checksum_filename = os.path.join(
                    path, "%s.%sSUM" % (filename, checksum.upper())
                )
                results[checksum_filename].add((filename, filesize, checksum, digest))

            if one_file:
                dirname = os.path.basename(path)
                base_checksum_name = base_checksum_name_gen(
                    variant, arch, dirname=dirname
                )
                checksum_filename = base_checksum_name + "CHECKSUM"
            else:
                base_checksum_name = base_checksum_name_gen(variant, arch)
                checksum_filename = "%s%sSUM" % (base_checksum_name, checksum.upper())
            checksum_path = os.path.join(path, checksum_filename)

            results[checksum_path].add((filename, filesize, checksum, digest))
@@ -109,8 +132,17 @@ def make_checksums(topdir, im, checksum_types, one_file, base_checksum_name_gen)
    results = defaultdict(set)
    cache = {}
    for (variant, arch, path), images in get_images(topdir, im).items():
        _compute_checksums(
            results,
            cache,
            variant,
            arch,
            path,
            images,
            checksum_types,
            base_checksum_name_gen,
            one_file,
        )

    for file in results:
        dump_checksums(file, results[file])
@@ -122,10 +154,10 @@ def dump_checksums(checksum_file, data)
    :param checksum_file: where to write the checksums
    :param data: an iterable of tuples (filename, filesize, checksum_type, hash)
    """
    with open(checksum_file, "w") as f:
        for filename, filesize, alg, checksum in sorted(data):
            f.write("# %s: %s bytes\n" % (filename, filesize))
            f.write("%s (%s) = %s\n" % (alg.upper(), filename, checksum))


def get_images(top_dir, manifest):
@@ -32,6 +32,7 @@ from pungi.wrappers.scm import get_dir_from_scm, get_file_from_scm

class InitPhase(PhaseBase):
    """INIT is a mandatory phase"""

    name = "init"

    def skip(self):
@@ -44,7 +45,7 @@ class InitPhase(PhaseBase)
        # write global comps and arch comps, create comps repos
        global_comps = write_global_comps(self.compose)
        validate_comps(global_comps)
        num_workers = self.compose.conf["createrepo_num_threads"]
        run_in_threads(
            _arch_worker,
            [(self.compose, arch) for arch in self.compose.get_arches()],
@@ -112,12 +113,18 @@ def write_arch_comps(compose, arch)
    comps_file_arch = compose.paths.work.comps(arch=arch)

    compose.log_debug("Writing comps file for arch '%s': %s", arch, comps_file_arch)
    run(
        [
            "comps_filter",
            "--arch=%s" % arch,
            "--no-cleanup",
            "--output=%s" % comps_file_arch,
            compose.paths.work.comps(arch="global"),
        ]
    )


UNMATCHED_GROUP_MSG = "Variant %s.%s requires comps group %s which does not match anything in input comps file"


def get_lookaside_groups(compose, variant):
@@ -146,14 +153,14 @@ def write_variant_comps(compose, arch, variant)
        "--keep-empty-group=conflicts-%s" % variant.uid.lower(),
        "--variant=%s" % variant.uid,
        "--output=%s" % comps_file,
        compose.paths.work.comps(arch="global"),
    ]
    for group in get_lookaside_groups(compose, variant):
        cmd.append("--lookaside-group=%s" % group)
    run(cmd)

    comps = CompsWrapper(comps_file)
    if variant.groups or variant.modules is not None or variant.type != "variant":
        # Filter groups if the variant has some, or it's a modular variant, or
        # is not a base variant.
        unmatched = comps.filter_groups(variant.groups)
@@ -175,11 +182,15 @@ def create_comps_repo(compose, arch, variant)
    repo = CreaterepoWrapper(createrepo_c=createrepo_c)
    comps_repo = compose.paths.work.comps_repo(arch=arch, variant=variant)
    comps_path = compose.paths.work.comps(arch=arch, variant=variant)
    msg = "Creating comps repo for arch '%s' variant '%s'" % (
        arch,
        variant.uid if variant else None,
    )

    compose.log_info("[BEGIN] %s" % msg)
    cmd = repo.get_createrepo_cmd(
        comps_repo,
        database=False,
        outputdir=comps_repo,
        groupfile=comps_path,
        checksum=createrepo_checksum,
@@ -200,7 +211,9 @@ def write_module_defaults(compose)
    with temp_dir(prefix="moduledefaults_") as tmp_dir:
        get_dir_from_scm(scm_dict, tmp_dir, compose=compose)
        compose.log_debug("Writing module defaults")
        shutil.copytree(
            tmp_dir, compose.paths.work.module_defaults_dir(create_dir=False)
        )


def validate_module_defaults(path):
@ -33,11 +33,14 @@ from pungi.util import get_repo_urls
|
|||||||
|
|
||||||
# HACK: define cmp in python3
|
# HACK: define cmp in python3
|
||||||
if sys.version_info[0] == 3:
|
if sys.version_info[0] == 3:
|
||||||
|
|
||||||
def cmp(a, b):
|
def cmp(a, b):
|
||||||
return (a > b) - (a < b)
|
return (a > b) - (a < b)
|
||||||
|
|
||||||
|
|
||||||
class LiveImagesPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase):
|
class LiveImagesPhase(
|
||||||
|
base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigGuardedPhase
|
||||||
|
):
|
||||||
name = "live_images"
|
name = "live_images"
|
||||||
|
|
||||||
def __init__(self, compose):
|
def __init__(self, compose):
|
||||||
@ -48,7 +51,7 @@ class LiveImagesPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
|
|||||||
repos = []
|
repos = []
|
||||||
if not variant.is_empty:
|
if not variant.is_empty:
|
||||||
repos.append(variant.uid)
|
repos.append(variant.uid)
|
||||||
repos.extend(force_list(data.get('repo', [])))
|
repos.extend(force_list(data.get("repo", [])))
|
||||||
return get_repo_urls(self.compose, repos, arch=arch)
|
return get_repo_urls(self.compose, repos, arch=arch)
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
@ -58,27 +61,31 @@ class LiveImagesPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
|
|||||||
for variant in self.compose.all_variants.values():
|
for variant in self.compose.all_variants.values():
|
||||||
for arch in variant.arches + ["src"]:
|
for arch in variant.arches + ["src"]:
|
||||||
for data in self.get_config_block(variant, arch):
|
for data in self.get_config_block(variant, arch):
|
||||||
subvariant = data.get('subvariant', variant.uid)
|
subvariant = data.get("subvariant", variant.uid)
|
||||||
type = data.get('type', 'live')
|
type = data.get("type", "live")
|
||||||
|
|
||||||
if type == 'live':
|
if type == "live":
|
||||||
dest_dir = self.compose.paths.compose.iso_dir(arch, variant, symlink_to=symlink_isos_to)
|
dest_dir = self.compose.paths.compose.iso_dir(
|
||||||
elif type == 'appliance':
|
arch, variant, symlink_to=symlink_isos_to
|
||||||
dest_dir = self.compose.paths.compose.image_dir(variant, symlink_to=symlink_isos_to)
|
)
|
||||||
dest_dir = dest_dir % {'arch': arch}
|
elif type == "appliance":
|
||||||
|
dest_dir = self.compose.paths.compose.image_dir(
|
||||||
|
variant, symlink_to=symlink_isos_to
|
||||||
|
)
|
||||||
|
dest_dir = dest_dir % {"arch": arch}
|
||||||
makedirs(dest_dir)
|
makedirs(dest_dir)
|
||||||
else:
|
else:
|
||||||
raise RuntimeError('Unknown live image type %s' % type)
|
raise RuntimeError("Unknown live image type %s" % type)
|
||||||
if not dest_dir:
|
if not dest_dir:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
cmd = {
|
cmd = {
|
||||||
"name": data.get('name'),
|
"name": data.get("name"),
|
||||||
"version": self.get_version(data),
|
"version": self.get_version(data),
|
||||||
"release": self.get_release(data),
|
"release": self.get_release(data),
|
||||||
"dest_dir": dest_dir,
|
"dest_dir": dest_dir,
|
||||||
"build_arch": arch,
|
"build_arch": arch,
|
||||||
"ks_file": data['kickstart'],
|
"ks_file": data["kickstart"],
|
||||||
"ksurl": self.get_ksurl(data),
|
"ksurl": self.get_ksurl(data),
|
||||||
# Used for images wrapped in RPM
|
# Used for images wrapped in RPM
|
||||||
"specfile": data.get("specfile", None),
|
"specfile": data.get("specfile", None),
|
||||||
@ -91,10 +98,11 @@ class LiveImagesPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
|
|||||||
"type": type,
|
"type": type,
|
||||||
"label": "", # currently not used
|
"label": "", # currently not used
|
||||||
"subvariant": subvariant,
|
"subvariant": subvariant,
|
||||||
"failable_arches": data.get('failable', []),
|
"failable_arches": data.get("failable", []),
|
||||||
# First see if live_target is specified, then fall back
|
# First see if live_target is specified, then fall back
|
||||||
# to regular setup of local, phase and global setting.
|
# to regular setup of local, phase and global setting.
|
||||||
"target": self.compose.conf.get('live_target') or self.get_config(data, 'target'),
|
"target": self.compose.conf.get("live_target")
|
||||||
|
or self.get_config(data, "target"),
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd["repos"] = self._get_repos(arch, variant, data)
|
cmd["repos"] = self._get_repos(arch, variant, data)
|
||||||
@@ -103,7 +111,9 @@ class LiveImagesPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
                     if not cmd["scratch"] and data.get("sign"):
                         cmd["sign"] = True

-                    cmd['filename'] = self._get_file_name(arch, variant, cmd['name'], cmd['version'])
+                    cmd["filename"] = self._get_file_name(
+                        arch, variant, cmd["name"], cmd["version"]
+                    )

                     commands.append((cmd, variant, arch))

@ -114,46 +124,66 @@ class LiveImagesPhase(base.PhaseLoggerMixin, base.ImageConfigMixin, base.ConfigG
|
|||||||
self.pool.start()
|
self.pool.start()
|
||||||
|
|
||||||
def _get_file_name(self, arch, variant, name=None, version=None):
|
def _get_file_name(self, arch, variant, name=None, version=None):
|
||||||
if self.compose.conf['live_images_no_rename']:
|
if self.compose.conf["live_images_no_rename"]:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
disc_type = self.compose.conf['disc_types'].get('live', 'live')
|
disc_type = self.compose.conf["disc_types"].get("live", "live")
|
||||||
|
|
||||||
format = "%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
|
format = (
|
||||||
|
"%(compose_id)s-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
|
||||||
|
)
|
||||||
# Custom name (prefix)
|
# Custom name (prefix)
|
||||||
if name:
|
if name:
|
||||||
custom_iso_name = name
|
custom_iso_name = name
|
||||||
if version:
|
if version:
|
||||||
custom_iso_name += "-%s" % version
|
custom_iso_name += "-%s" % version
|
||||||
format = custom_iso_name + "-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
|
format = (
|
||||||
|
custom_iso_name
|
||||||
|
+ "-%(variant)s-%(arch)s-%(disc_type)s%(disc_num)s%(suffix)s"
|
||||||
|
)
|
||||||
|
|
||||||
# XXX: hardcoded disc_num
|
# XXX: hardcoded disc_num
|
||||||
return self.compose.get_image_name(arch, variant, disc_type=disc_type,
|
return self.compose.get_image_name(
|
||||||
disc_num=None, format=format)
|
arch, variant, disc_type=disc_type, disc_num=None, format=format
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class CreateLiveImageThread(WorkerThread):
|
class CreateLiveImageThread(WorkerThread):
|
||||||
EXTS = ('.iso', '.raw.xz')
|
EXTS = (".iso", ".raw.xz")
|
||||||
|
|
||||||
def process(self, item, num):
|
def process(self, item, num):
|
||||||
compose, cmd, variant, arch = item
|
compose, cmd, variant, arch = item
|
||||||
self.failable_arches = cmd.get('failable_arches', [])
|
self.failable_arches = cmd.get("failable_arches", [])
|
||||||
self.can_fail = bool(self.failable_arches)
|
self.can_fail = bool(self.failable_arches)
|
||||||
with failable(compose, self.can_fail, variant, arch, 'live', cmd.get('subvariant'),
|
with failable(
|
||||||
logger=self.pool._logger):
|
compose,
|
||||||
|
self.can_fail,
|
||||||
|
variant,
|
||||||
|
arch,
|
||||||
|
"live",
|
||||||
|
cmd.get("subvariant"),
|
||||||
|
logger=self.pool._logger,
|
||||||
|
):
|
||||||
self.worker(compose, cmd, variant, arch, num)
|
self.worker(compose, cmd, variant, arch, num)
|
||||||
|
|
||||||
def worker(self, compose, cmd, variant, arch, num):
|
def worker(self, compose, cmd, variant, arch, num):
|
||||||
self.basename = '%(name)s-%(version)s-%(release)s' % cmd
|
self.basename = "%(name)s-%(version)s-%(release)s" % cmd
|
||||||
log_file = compose.paths.log.log_file(arch, "liveimage-%s" % self.basename)
|
log_file = compose.paths.log.log_file(arch, "liveimage-%s" % self.basename)
|
||||||
|
|
||||||
subvariant = cmd.pop('subvariant')
|
subvariant = cmd.pop("subvariant")
|
||||||
|
|
||||||
imgname = "%s-%s-%s-%s" % (compose.ci_base.release.short, subvariant,
|
imgname = "%s-%s-%s-%s" % (
|
||||||
'Live' if cmd['type'] == 'live' else 'Disk',
|
compose.ci_base.release.short,
|
||||||
arch)
|
subvariant,
|
||||||
|
"Live" if cmd["type"] == "live" else "Disk",
|
||||||
|
arch,
|
||||||
|
)
|
||||||
|
|
||||||
msg = "Creating ISO (arch: %s, variant: %s): %s" % (arch, variant, self.basename)
|
msg = "Creating ISO (arch: %s, variant: %s): %s" % (
|
||||||
|
arch,
|
||||||
|
variant,
|
||||||
|
self.basename,
|
||||||
|
)
|
||||||
self.pool.log_info("[BEGIN] %s" % msg)
|
self.pool.log_info("[BEGIN] %s" % msg)
|
||||||
|
|
||||||
koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
|
koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
|
||||||
@@ -164,17 +194,20 @@ class CreateLiveImageThread(WorkerThread):
         if cmd["specfile"] and not cmd["scratch"]:
             # Non scratch build are allowed only for rpm wrapped images
             archive = True
-        koji_cmd = koji_wrapper.get_create_image_cmd(name, version,
+        koji_cmd = koji_wrapper.get_create_image_cmd(
+            name,
+            version,
             cmd["target"],
             cmd["build_arch"],
             cmd["ks_file"],
             cmd["repos"],
-            image_type=cmd['type'],
+            image_type=cmd["type"],
             wait=True,
             archive=archive,
             specfile=cmd["specfile"],
-            release=cmd['release'],
-            ksurl=cmd['ksurl'])
+            release=cmd["release"],
+            ksurl=cmd["ksurl"],
+        )

         # avoid race conditions?
         # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)
@@ -182,17 +215,25 @@ class CreateLiveImageThread(WorkerThread):

         output = koji_wrapper.run_blocking_cmd(koji_cmd, log_file=log_file)
         if output["retcode"] != 0:
-            raise RuntimeError("LiveImage task failed: %s. See %s for more details." % (output["task_id"], log_file))
+            raise RuntimeError(
+                "LiveImage task failed: %s. See %s for more details."
+                % (output["task_id"], log_file)
+            )

         # copy finished image to isos/
-        image_path = [path for path in koji_wrapper.get_image_path(output["task_id"])
-                      if self._is_image(path)]
+        image_path = [
+            path
+            for path in koji_wrapper.get_image_path(output["task_id"])
+            if self._is_image(path)
+        ]
         if len(image_path) != 1:
-            raise RuntimeError('Got %d images from task %d, expected 1.'
-                               % (len(image_path), output['task_id']))
+            raise RuntimeError(
+                "Got %d images from task %d, expected 1."
+                % (len(image_path), output["task_id"])
+            )
         image_path = image_path[0]
-        filename = cmd.get('filename') or os.path.basename(image_path)
-        destination = os.path.join(cmd['dest_dir'], filename)
+        filename = cmd.get("filename") or os.path.basename(image_path)
+        destination = os.path.join(cmd["dest_dir"], filename)
         shutil.copy2(image_path, destination)

         # copy finished rpm to isos/ (if rpm wrapped ISO was built)
@ -201,27 +242,39 @@ class CreateLiveImageThread(WorkerThread):
|
|||||||
|
|
||||||
if cmd["sign"]:
|
if cmd["sign"]:
|
||||||
# Sign the rpm wrapped images and get their paths
|
# Sign the rpm wrapped images and get their paths
|
||||||
self.pool.log_info("Signing rpm wrapped images in task_id: %s (expected key ID: %s)"
|
self.pool.log_info(
|
||||||
% (output["task_id"], compose.conf.get("signing_key_id")))
|
"Signing rpm wrapped images in task_id: %s (expected key ID: %s)"
|
||||||
signed_rpm_paths = self._sign_image(koji_wrapper, compose, cmd, output["task_id"])
|
% (output["task_id"], compose.conf.get("signing_key_id"))
|
||||||
|
)
|
||||||
|
signed_rpm_paths = self._sign_image(
|
||||||
|
koji_wrapper, compose, cmd, output["task_id"]
|
||||||
|
)
|
||||||
if signed_rpm_paths:
|
if signed_rpm_paths:
|
||||||
rpm_paths = signed_rpm_paths
|
rpm_paths = signed_rpm_paths
|
||||||
|
|
||||||
for rpm_path in rpm_paths:
|
for rpm_path in rpm_paths:
|
||||||
shutil.copy2(rpm_path, cmd["dest_dir"])
|
shutil.copy2(rpm_path, cmd["dest_dir"])
|
||||||
|
|
||||||
if cmd['type'] == 'live':
|
if cmd["type"] == "live":
|
||||||
# ISO manifest only makes sense for live images
|
# ISO manifest only makes sense for live images
|
||||||
self._write_manifest(destination)
|
self._write_manifest(destination)
|
||||||
|
|
||||||
self._add_to_images(compose, variant, subvariant, arch, cmd['type'], self._get_format(image_path), destination)
|
self._add_to_images(
|
||||||
|
compose,
|
||||||
|
variant,
|
||||||
|
subvariant,
|
||||||
|
arch,
|
||||||
|
cmd["type"],
|
||||||
|
self._get_format(image_path),
|
||||||
|
destination,
|
||||||
|
)
|
||||||
|
|
||||||
self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output['task_id']))
|
self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
|
||||||
|
|
||||||
def _add_to_images(self, compose, variant, subvariant, arch, type, format, path):
|
def _add_to_images(self, compose, variant, subvariant, arch, type, format, path):
|
||||||
"""Adds the image to images.json"""
|
"""Adds the image to images.json"""
|
||||||
img = Image(compose.im)
|
img = Image(compose.im)
|
||||||
img.type = 'raw-xz' if type == 'appliance' else type
|
img.type = "raw-xz" if type == "appliance" else type
|
||||||
img.format = format
|
img.format = format
|
||||||
img.path = os.path.relpath(path, compose.paths.compose.topdir())
|
img.path = os.path.relpath(path, compose.paths.compose.topdir())
|
||||||
img.mtime = get_mtime(path)
|
img.mtime = get_mtime(path)
|
||||||
@@ -231,8 +284,8 @@ class CreateLiveImageThread(WorkerThread):
         img.disc_count = 1
         img.bootable = True
         img.subvariant = subvariant
-        setattr(img, 'can_fail', self.can_fail)
-        setattr(img, 'deliverable', 'live')
+        setattr(img, "can_fail", self.can_fail)
+        setattr(img, "deliverable", "live")
         compose.im.add(variant=variant.uid, arch=arch, image=img)

     def _is_image(self, path):
@@ -246,7 +299,7 @@ class CreateLiveImageThread(WorkerThread):
         for ext in self.EXTS:
             if path.endswith(ext):
                 return ext[1:]
-        raise RuntimeError('Getting format for unknown image %s' % path)
+        raise RuntimeError("Getting format for unknown image %s" % path)

     def _write_manifest(self, iso_path):
         """Generate manifest for ISO at given path.
@ -261,30 +314,43 @@ class CreateLiveImageThread(WorkerThread):
|
|||||||
signing_command = compose.conf.get("signing_command")
|
signing_command = compose.conf.get("signing_command")
|
||||||
|
|
||||||
if not signing_key_id:
|
if not signing_key_id:
|
||||||
self.pool.log_warning("Signing is enabled but signing_key_id is not specified")
|
self.pool.log_warning(
|
||||||
|
"Signing is enabled but signing_key_id is not specified"
|
||||||
|
)
|
||||||
self.pool.log_warning("Signing skipped")
|
self.pool.log_warning("Signing skipped")
|
||||||
return None
|
return None
|
||||||
if not signing_command:
|
if not signing_command:
|
||||||
self.pool.log_warning("Signing is enabled but signing_command is not specified")
|
self.pool.log_warning(
|
||||||
|
"Signing is enabled but signing_command is not specified"
|
||||||
|
)
|
||||||
self.pool.log_warning("Signing skipped")
|
self.pool.log_warning("Signing skipped")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Prepare signing log file
|
# Prepare signing log file
|
||||||
signing_log_file = compose.paths.log.log_file(cmd["build_arch"],
|
signing_log_file = compose.paths.log.log_file(
|
||||||
"live_images-signing-%s" % self.basename)
|
cmd["build_arch"], "live_images-signing-%s" % self.basename
|
||||||
|
)
|
||||||
|
|
||||||
# Sign the rpm wrapped images
|
# Sign the rpm wrapped images
|
||||||
try:
|
try:
|
||||||
sign_builds_in_task(koji_wrapper, koji_task_id, signing_command,
|
sign_builds_in_task(
|
||||||
|
koji_wrapper,
|
||||||
|
koji_task_id,
|
||||||
|
signing_command,
|
||||||
log_file=signing_log_file,
|
log_file=signing_log_file,
|
||||||
signing_key_password=compose.conf.get("signing_key_password"))
|
signing_key_password=compose.conf.get("signing_key_password"),
|
||||||
|
)
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
self.pool.log_error("Error while signing rpm wrapped images. See log: %s" % signing_log_file)
|
self.pool.log_error(
|
||||||
|
"Error while signing rpm wrapped images. See log: %s" % signing_log_file
|
||||||
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
# Get pats to the signed rpms
|
# Get pats to the signed rpms
|
||||||
signing_key_id = signing_key_id.lower() # Koji uses lowercase in paths
|
signing_key_id = signing_key_id.lower() # Koji uses lowercase in paths
|
||||||
rpm_paths = koji_wrapper.get_signed_wrapped_rpms_paths(koji_task_id, signing_key_id)
|
rpm_paths = koji_wrapper.get_signed_wrapped_rpms_paths(
|
||||||
|
koji_task_id, signing_key_id
|
||||||
|
)
|
||||||
|
|
||||||
# Wait untill files are available
|
# Wait untill files are available
|
||||||
if wait_paths(rpm_paths, 60 * 15):
|
if wait_paths(rpm_paths, 60 * 15):
|
||||||
@@ -312,7 +378,9 @@ def wait_paths(paths, timeout=60):
     return True


-def sign_builds_in_task(koji_wrapper, task_id, signing_command, log_file=None, signing_key_password=None):
+def sign_builds_in_task(
+    koji_wrapper, task_id, signing_command, log_file=None, signing_key_password=None
+):
     # Get list of nvrs that should be signed
     nvrs = koji_wrapper.get_build_nvrs(task_id)
     if not nvrs:
@@ -329,7 +397,9 @@ def sign_builds_in_task(koji_wrapper, task_id, signing_command, log_file=None, signing_key_password=None):

     # Fill password into the signing command
     if signing_key_password:
-        signing_command = signing_command % {"signing_key_password": signing_key_password}
+        signing_command = signing_command % {
+            "signing_key_password": signing_key_password
+        }

     # Sign the builds
     run(signing_command, can_fail=False, show_cmd=False, logfile=log_file)
@@ -15,7 +15,8 @@ from productmd.images import Image

 class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
     """class for wrapping up koji spin-livemedia"""
-    name = 'live_media'
+
+    name = "live_media"

     def __init__(self, compose):
         super(LiveMediaPhase, self).__init__(compose)
@@ -26,7 +27,7 @@ class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
         Get a list of repo urls. First included are those explicitly listed in config,
         followed by repo for current variant if it's not present in the list.
         """
-        repos = shortcuts.force_list(image_conf.get('repo', []))
+        repos = shortcuts.force_list(image_conf.get("repo", []))

         if not variant.is_empty:
             if variant.uid not in repos:
@ -35,49 +36,52 @@ class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
|
|||||||
return get_repo_urls(self.compose, repos)
|
return get_repo_urls(self.compose, repos)
|
||||||
|
|
||||||
def _get_arches(self, image_conf, arches):
|
def _get_arches(self, image_conf, arches):
|
||||||
if 'arches' in image_conf:
|
if "arches" in image_conf:
|
||||||
arches = set(image_conf.get('arches', [])) & arches
|
arches = set(image_conf.get("arches", [])) & arches
|
||||||
return sorted(arches)
|
return sorted(arches)
|
||||||
|
|
||||||
def _get_install_tree(self, image_conf, variant):
|
def _get_install_tree(self, image_conf, variant):
|
||||||
if 'install_tree_from' in image_conf:
|
if "install_tree_from" in image_conf:
|
||||||
variant_uid = image_conf['install_tree_from']
|
variant_uid = image_conf["install_tree_from"]
|
||||||
try:
|
try:
|
||||||
variant = self.compose.all_variants[variant_uid]
|
variant = self.compose.all_variants[variant_uid]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
'There is no variant %s to get repo from when building live media for %s.'
|
"There is no variant %s to get repo from when building live media for %s."
|
||||||
% (variant_uid, variant.uid))
|
% (variant_uid, variant.uid)
|
||||||
|
)
|
||||||
return translate_path(
|
return translate_path(
|
||||||
self.compose,
|
self.compose,
|
||||||
self.compose.paths.compose.os_tree('$basearch', variant, create_dir=False)
|
self.compose.paths.compose.os_tree("$basearch", variant, create_dir=False),
|
||||||
)
|
)
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
for variant in self.compose.get_variants():
|
for variant in self.compose.get_variants():
|
||||||
arches = set([x for x in variant.arches if x != 'src'])
|
arches = set([x for x in variant.arches if x != "src"])
|
||||||
for image_conf in self.get_config_block(variant):
|
for image_conf in self.get_config_block(variant):
|
||||||
subvariant = image_conf.get('subvariant', variant.uid)
|
subvariant = image_conf.get("subvariant", variant.uid)
|
||||||
name = image_conf.get(
|
name = image_conf.get(
|
||||||
'name', "%s-%s-Live" % (self.compose.ci_base.release.short, subvariant))
|
"name",
|
||||||
|
"%s-%s-Live" % (self.compose.ci_base.release.short, subvariant),
|
||||||
|
)
|
||||||
config = {
|
config = {
|
||||||
'target': self.get_config(image_conf, 'target'),
|
"target": self.get_config(image_conf, "target"),
|
||||||
'arches': self._get_arches(image_conf, arches),
|
"arches": self._get_arches(image_conf, arches),
|
||||||
'ksfile': image_conf['kickstart'],
|
"ksfile": image_conf["kickstart"],
|
||||||
'ksurl': self.get_ksurl(image_conf),
|
"ksurl": self.get_ksurl(image_conf),
|
||||||
'ksversion': image_conf.get('ksversion'),
|
"ksversion": image_conf.get("ksversion"),
|
||||||
'scratch': image_conf.get('scratch', False),
|
"scratch": image_conf.get("scratch", False),
|
||||||
'release': self.get_release(image_conf),
|
"release": self.get_release(image_conf),
|
||||||
'skip_tag': image_conf.get('skip_tag'),
|
"skip_tag": image_conf.get("skip_tag"),
|
||||||
'name': name,
|
"name": name,
|
||||||
'subvariant': subvariant,
|
"subvariant": subvariant,
|
||||||
'repo': self._get_repos(image_conf, variant),
|
"repo": self._get_repos(image_conf, variant),
|
||||||
'install_tree': self._get_install_tree(image_conf, variant),
|
"install_tree": self._get_install_tree(image_conf, variant),
|
||||||
'version': self.get_version(image_conf),
|
"version": self.get_version(image_conf),
|
||||||
'failable_arches': image_conf.get('failable', []),
|
"failable_arches": image_conf.get("failable", []),
|
||||||
}
|
}
|
||||||
if config['failable_arches'] == ['*']:
|
if config["failable_arches"] == ["*"]:
|
||||||
config['failable_arches'] = config['arches']
|
config["failable_arches"] = config["arches"]
|
||||||
self.pool.add(LiveMediaThread(self.pool))
|
self.pool.add(LiveMediaThread(self.pool))
|
||||||
self.pool.queue_put((self.compose, variant, config))
|
self.pool.queue_put((self.compose, variant, config))
|
||||||
|
|
||||||
@ -87,42 +91,56 @@ class LiveMediaPhase(PhaseLoggerMixin, ImageConfigMixin, ConfigGuardedPhase):
|
|||||||
class LiveMediaThread(WorkerThread):
|
class LiveMediaThread(WorkerThread):
|
||||||
def process(self, item, num):
|
def process(self, item, num):
|
||||||
compose, variant, config = item
|
compose, variant, config = item
|
||||||
subvariant = config.pop('subvariant')
|
subvariant = config.pop("subvariant")
|
||||||
self.failable_arches = config.pop('failable_arches')
|
self.failable_arches = config.pop("failable_arches")
|
||||||
self.num = num
|
self.num = num
|
||||||
can_fail = set(self.failable_arches) == set(config['arches'])
|
can_fail = set(self.failable_arches) == set(config["arches"])
|
||||||
with failable(compose, can_fail, variant, '*', 'live-media', subvariant,
|
with failable(
|
||||||
logger=self.pool._logger):
|
compose,
|
||||||
|
can_fail,
|
||||||
|
variant,
|
||||||
|
"*",
|
||||||
|
"live-media",
|
||||||
|
subvariant,
|
||||||
|
logger=self.pool._logger,
|
||||||
|
):
|
||||||
self.worker(compose, variant, subvariant, config)
|
self.worker(compose, variant, subvariant, config)
|
||||||
|
|
||||||
def _get_log_file(self, compose, variant, subvariant, config):
|
def _get_log_file(self, compose, variant, subvariant, config):
|
||||||
arches = '-'.join(config['arches'])
|
arches = "-".join(config["arches"])
|
||||||
return compose.paths.log.log_file(arches, 'livemedia-%s-%s'
|
return compose.paths.log.log_file(
|
||||||
% (variant.uid, subvariant))
|
arches, "livemedia-%s-%s" % (variant.uid, subvariant)
|
||||||
|
)
|
||||||
|
|
||||||
def _run_command(self, koji_wrapper, cmd, compose, log_file):
|
def _run_command(self, koji_wrapper, cmd, compose, log_file):
|
||||||
time.sleep(self.num * 3)
|
time.sleep(self.num * 3)
|
||||||
output = koji_wrapper.run_blocking_cmd(cmd, log_file=log_file)
|
output = koji_wrapper.run_blocking_cmd(cmd, log_file=log_file)
|
||||||
self.pool.log_debug('live media outputs: %s' % (output))
|
self.pool.log_debug("live media outputs: %s" % (output))
|
||||||
if output['retcode'] != 0:
|
if output["retcode"] != 0:
|
||||||
self.pool.log_error('Live media task failed.')
|
self.pool.log_error("Live media task failed.")
|
||||||
raise RuntimeError('Live media task failed: %s. See %s for more details.'
|
raise RuntimeError(
|
||||||
% (output['task_id'], log_file))
|
"Live media task failed: %s. See %s for more details."
|
||||||
|
% (output["task_id"], log_file)
|
||||||
|
)
|
||||||
return output
|
return output
|
||||||
|
|
||||||
def _get_cmd(self, koji_wrapper, config):
|
def _get_cmd(self, koji_wrapper, config):
|
||||||
"""Replace `arches` (as list) with `arch` as a comma-separated string."""
|
"""Replace `arches` (as list) with `arch` as a comma-separated string."""
|
||||||
copy = dict(config)
|
copy = dict(config)
|
||||||
copy['arch'] = ','.join(copy.pop('arches', []))
|
copy["arch"] = ",".join(copy.pop("arches", []))
|
||||||
copy['can_fail'] = self.failable_arches
|
copy["can_fail"] = self.failable_arches
|
||||||
return koji_wrapper.get_live_media_cmd(copy)
|
return koji_wrapper.get_live_media_cmd(copy)
|
||||||
|
|
||||||
def worker(self, compose, variant, subvariant, config):
|
def worker(self, compose, variant, subvariant, config):
|
||||||
msg = ('Live media: %s (arches: %s, variant: %s, subvariant: %s)'
|
msg = "Live media: %s (arches: %s, variant: %s, subvariant: %s)" % (
|
||||||
% (config['name'], ' '.join(config['arches']), variant.uid, subvariant))
|
config["name"],
|
||||||
self.pool.log_info('[BEGIN] %s' % msg)
|
" ".join(config["arches"]),
|
||||||
|
variant.uid,
|
||||||
|
subvariant,
|
||||||
|
)
|
||||||
|
self.pool.log_info("[BEGIN] %s" % msg)
|
||||||
|
|
||||||
koji_wrapper = KojiWrapper(compose.conf['koji_profile'])
|
koji_wrapper = KojiWrapper(compose.conf["koji_profile"])
|
||||||
cmd = self._get_cmd(koji_wrapper, config)
|
cmd = self._get_cmd(koji_wrapper, config)
|
||||||
|
|
||||||
log_file = self._get_log_file(compose, variant, subvariant, config)
|
log_file = self._get_log_file(compose, variant, subvariant, config)
|
||||||
@ -132,51 +150,54 @@ class LiveMediaThread(WorkerThread):
|
|||||||
image_infos = []
|
image_infos = []
|
||||||
|
|
||||||
paths = koji_wrapper.get_image_paths(
|
paths = koji_wrapper.get_image_paths(
|
||||||
output['task_id'],
|
output["task_id"],
|
||||||
callback=lambda arch: log_failed_task(compose, variant, arch, 'live-media', subvariant)
|
callback=lambda arch: log_failed_task(
|
||||||
|
compose, variant, arch, "live-media", subvariant
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
for arch, paths in paths.items():
|
for arch, paths in paths.items():
|
||||||
for path in paths:
|
for path in paths:
|
||||||
if path.endswith('.iso'):
|
if path.endswith(".iso"):
|
||||||
image_infos.append({'path': path, 'arch': arch})
|
image_infos.append({"path": path, "arch": arch})
|
||||||
|
|
||||||
if len(image_infos) < len(config['arches']) - len(self.failable_arches):
|
if len(image_infos) < len(config["arches"]) - len(self.failable_arches):
|
||||||
self.pool.log_error(
|
self.pool.log_error(
|
||||||
'Error in koji task %s. Expected to find at least one image '
|
"Error in koji task %s. Expected to find at least one image "
|
||||||
'for each required arch (%s). Got %s.'
|
"for each required arch (%s). Got %s."
|
||||||
% (output['task_id'], len(config['arches']), len(image_infos)))
|
% (output["task_id"], len(config["arches"]), len(image_infos))
|
||||||
raise RuntimeError('Image count mismatch in task %s.' % output['task_id'])
|
)
|
||||||
|
raise RuntimeError("Image count mismatch in task %s." % output["task_id"])
|
||||||
|
|
||||||
linker = Linker(logger=self.pool._logger)
|
linker = Linker(logger=self.pool._logger)
|
||||||
link_type = compose.conf["link_type"]
|
link_type = compose.conf["link_type"]
|
||||||
for image_info in image_infos:
|
for image_info in image_infos:
|
||||||
image_dir = compose.paths.compose.iso_dir(image_info['arch'], variant)
|
image_dir = compose.paths.compose.iso_dir(image_info["arch"], variant)
|
||||||
makedirs(image_dir)
|
makedirs(image_dir)
|
||||||
relative_image_dir = (
|
relative_image_dir = compose.paths.compose.iso_dir(
|
||||||
compose.paths.compose.iso_dir(image_info['arch'], variant, relative=True)
|
image_info["arch"], variant, relative=True
|
||||||
)
|
)
|
||||||
|
|
||||||
# let's not change filename of koji outputs
|
# let's not change filename of koji outputs
|
||||||
image_dest = os.path.join(image_dir, os.path.basename(image_info['path']))
|
image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))
|
||||||
|
|
||||||
src_file = os.path.realpath(image_info["path"])
|
src_file = os.path.realpath(image_info["path"])
|
||||||
linker.link(src_file, image_dest, link_type=link_type)
|
linker.link(src_file, image_dest, link_type=link_type)
|
||||||
|
|
||||||
# Update image manifest
|
# Update image manifest
|
||||||
img = Image(compose.im)
|
img = Image(compose.im)
|
||||||
img.type = 'live'
|
img.type = "live"
|
||||||
img.format = 'iso'
|
img.format = "iso"
|
||||||
img.path = os.path.join(relative_image_dir, os.path.basename(image_dest))
|
img.path = os.path.join(relative_image_dir, os.path.basename(image_dest))
|
||||||
img.mtime = get_mtime(image_dest)
|
img.mtime = get_mtime(image_dest)
|
||||||
img.size = get_file_size(image_dest)
|
img.size = get_file_size(image_dest)
|
||||||
img.arch = image_info['arch']
|
img.arch = image_info["arch"]
|
||||||
img.disc_number = 1 # We don't expect multiple disks
|
img.disc_number = 1 # We don't expect multiple disks
|
||||||
img.disc_count = 1
|
img.disc_count = 1
|
||||||
img.bootable = True
|
img.bootable = True
|
||||||
img.subvariant = subvariant
|
img.subvariant = subvariant
|
||||||
setattr(img, 'can_fail', bool(self.failable_arches))
|
setattr(img, "can_fail", bool(self.failable_arches))
|
||||||
setattr(img, 'deliverable', 'live-media')
|
setattr(img, "deliverable", "live-media")
|
||||||
compose.im.add(variant=variant.uid, arch=image_info['arch'], image=img)
|
compose.im.add(variant=variant.uid, arch=image_info["arch"], image=img)
|
||||||
|
|
||||||
self.pool.log_info('[DONE ] %s (task id: %s)' % (msg, output['task_id']))
|
self.pool.log_info("[DONE ] %s (task id: %s)" % (msg, output["task_id"]))
|
||||||
|
@@ -12,7 +12,7 @@ from ..wrappers import kojiwrapper


 class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):
-    name = 'osbs'
+    name = "osbs"

     def __init__(self, compose):
         super(OSBSPhase, self).__init__(compose)
@@ -32,9 +32,10 @@ class OSBSPhase(PhaseLoggerMixin, ConfigGuardedPhase):
         """Create a file with image metadata if the phase actually ran."""
         if self._skipped:
             return
-        with open(self.compose.paths.compose.metadata('osbs.json'), 'w') as f:
-            json.dump(self.pool.metadata, f, indent=4, sort_keys=True,
-                      separators=(',', ': '))
+        with open(self.compose.paths.compose.metadata("osbs.json"), "w") as f:
+            json.dump(
+                self.pool.metadata, f, indent=4, sort_keys=True, separators=(",", ": ")
+            )

     def request_push(self):
         """Store configuration data about where to push the created images and
@ -73,100 +74,117 @@ class OSBSThread(WorkerThread):
|
|||||||
def process(self, item, num):
|
def process(self, item, num):
|
||||||
compose, variant, config = item
|
compose, variant, config = item
|
||||||
self.num = num
|
self.num = num
|
||||||
with util.failable(compose, bool(config.pop('failable', None)), variant, '*', 'osbs',
|
with util.failable(
|
||||||
logger=self.pool._logger):
|
compose,
|
||||||
|
bool(config.pop("failable", None)),
|
||||||
|
variant,
|
||||||
|
"*",
|
||||||
|
"osbs",
|
||||||
|
logger=self.pool._logger,
|
||||||
|
):
|
||||||
self.worker(compose, variant, config)
|
self.worker(compose, variant, config)
|
||||||
|
|
||||||
def worker(self, compose, variant, config):
|
def worker(self, compose, variant, config):
|
||||||
msg = 'OSBS task for variant %s' % variant.uid
|
msg = "OSBS task for variant %s" % variant.uid
|
||||||
self.pool.log_info('[BEGIN] %s' % msg)
|
self.pool.log_info("[BEGIN] %s" % msg)
|
||||||
koji = kojiwrapper.KojiWrapper(compose.conf['koji_profile'])
|
koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
|
||||||
koji.login()
|
koji.login()
|
||||||
|
|
||||||
# Start task
|
# Start task
|
||||||
source = config.pop('url')
|
source = config.pop("url")
|
||||||
target = config.pop('target')
|
target = config.pop("target")
|
||||||
priority = config.pop('priority', None)
|
priority = config.pop("priority", None)
|
||||||
gpgkey = config.pop('gpgkey', None)
|
gpgkey = config.pop("gpgkey", None)
|
||||||
repos = [self._get_repo(compose, v, gpgkey=gpgkey)
|
repos = [
|
||||||
for v in [variant.uid] + shortcuts.force_list(config.pop('repo', []))]
|
self._get_repo(compose, v, gpgkey=gpgkey)
|
||||||
|
for v in [variant.uid] + shortcuts.force_list(config.pop("repo", []))
|
||||||
|
]
|
||||||
# Deprecated in 4.1.36
|
# Deprecated in 4.1.36
|
||||||
registry = config.pop("registry", None)
|
registry = config.pop("registry", None)
|
||||||
|
|
||||||
config['yum_repourls'] = repos
|
config["yum_repourls"] = repos
|
||||||
|
|
||||||
task_id = koji.koji_proxy.buildContainer(source, target, config,
|
task_id = koji.koji_proxy.buildContainer(
|
||||||
priority=priority)
|
source, target, config, priority=priority
|
||||||
|
)
|
||||||
|
|
||||||
# Wait for it to finish and capture the output into log file (even
|
# Wait for it to finish and capture the output into log file (even
|
||||||
# though there is not much there).
|
# though there is not much there).
|
||||||
log_dir = os.path.join(compose.paths.log.topdir(), 'osbs')
|
log_dir = os.path.join(compose.paths.log.topdir(), "osbs")
|
||||||
util.makedirs(log_dir)
|
util.makedirs(log_dir)
|
||||||
log_file = os.path.join(log_dir, '%s-%s-watch-task.log'
|
log_file = os.path.join(
|
||||||
% (variant.uid, self.num))
|
log_dir, "%s-%s-watch-task.log" % (variant.uid, self.num)
|
||||||
|
)
|
||||||
if koji.watch_task(task_id, log_file) != 0:
|
if koji.watch_task(task_id, log_file) != 0:
|
||||||
raise RuntimeError('OSBS: task %s failed: see %s for details'
|
raise RuntimeError(
|
||||||
% (task_id, log_file))
|
"OSBS: task %s failed: see %s for details" % (task_id, log_file)
|
||||||
|
)
|
||||||
|
|
||||||
scratch = config.get('scratch', False)
|
scratch = config.get("scratch", False)
|
||||||
nvr = self._add_metadata(variant, task_id, compose, scratch)
|
nvr = self._add_metadata(variant, task_id, compose, scratch)
|
||||||
if nvr:
|
if nvr:
|
||||||
registry = get_registry(compose, nvr, registry)
|
registry = get_registry(compose, nvr, registry)
|
||||||
if registry:
|
if registry:
|
||||||
self.pool.registries[nvr] = registry
|
self.pool.registries[nvr] = registry
|
||||||
|
|
||||||
self.pool.log_info('[DONE ] %s' % msg)
|
self.pool.log_info("[DONE ] %s" % msg)
|
||||||
|
|
||||||
def _add_metadata(self, variant, task_id, compose, is_scratch):
|
def _add_metadata(self, variant, task_id, compose, is_scratch):
|
||||||
# Create new Koji session. The task could take so long to finish that
|
# Create new Koji session. The task could take so long to finish that
|
||||||
# our session will expire. This second session does not need to be
|
# our session will expire. This second session does not need to be
|
||||||
# authenticated since it will only do reading operations.
|
# authenticated since it will only do reading operations.
|
||||||
koji = kojiwrapper.KojiWrapper(compose.conf['koji_profile'])
|
koji = kojiwrapper.KojiWrapper(compose.conf["koji_profile"])
|
||||||
|
|
||||||
# Create metadata
|
# Create metadata
|
||||||
metadata = {
|
metadata = {
|
||||||
'compose_id': compose.compose_id,
|
"compose_id": compose.compose_id,
|
||||||
'koji_task': task_id,
|
"koji_task": task_id,
|
||||||
}
|
}
|
||||||
|
|
||||||
result = koji.koji_proxy.getTaskResult(task_id)
|
result = koji.koji_proxy.getTaskResult(task_id)
|
||||||
if is_scratch:
|
if is_scratch:
|
||||||
metadata.update({
|
metadata.update(
|
||||||
'repositories': result['repositories'],
|
{"repositories": result["repositories"]}
|
||||||
})
|
)
|
||||||
# add a fake arch of 'scratch', so we can construct the metadata
|
# add a fake arch of 'scratch', so we can construct the metadata
|
||||||
# in same data structure as real builds.
|
# in same data structure as real builds.
|
||||||
self.pool.metadata.setdefault(
|
self.pool.metadata.setdefault(variant.uid, {}).setdefault(
|
||||||
variant.uid, {}).setdefault('scratch', []).append(metadata)
|
"scratch", []
|
||||||
|
).append(metadata)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
else:
|
else:
|
||||||
build_id = int(result['koji_builds'][0])
|
build_id = int(result["koji_builds"][0])
|
||||||
buildinfo = koji.koji_proxy.getBuild(build_id)
|
buildinfo = koji.koji_proxy.getBuild(build_id)
|
||||||
archives = koji.koji_proxy.listArchives(build_id)
|
archives = koji.koji_proxy.listArchives(build_id)
|
||||||
|
|
||||||
nvr = "%(name)s-%(version)s-%(release)s" % buildinfo
|
nvr = "%(name)s-%(version)s-%(release)s" % buildinfo
|
||||||
|
|
||||||
metadata.update({
|
metadata.update(
|
||||||
'name': buildinfo['name'],
|
{
|
||||||
'version': buildinfo['version'],
|
"name": buildinfo["name"],
|
||||||
'release': buildinfo['release'],
|
"version": buildinfo["version"],
|
||||||
'nvr': nvr,
|
"release": buildinfo["release"],
|
||||||
'creation_time': buildinfo['creation_time'],
|
"nvr": nvr,
|
||||||
})
|
"creation_time": buildinfo["creation_time"],
|
||||||
|
}
|
||||||
|
)
|
||||||
for archive in archives:
|
for archive in archives:
|
||||||
data = {
|
data = {
|
||||||
'filename': archive['filename'],
|
"filename": archive["filename"],
|
||||||
'size': archive['size'],
|
"size": archive["size"],
|
||||||
'checksum': archive['checksum'],
|
"checksum": archive["checksum"],
|
||||||
}
|
}
|
||||||
data.update(archive['extra'])
|
data.update(archive["extra"])
|
||||||
data.update(metadata)
|
data.update(metadata)
|
||||||
arch = archive['extra']['image']['arch']
|
arch = archive["extra"]["image"]["arch"]
|
||||||
self.pool.log_debug('Created Docker base image %s-%s-%s.%s' % (
|
self.pool.log_debug(
|
||||||
metadata['name'], metadata['version'], metadata['release'], arch))
|
"Created Docker base image %s-%s-%s.%s"
|
||||||
self.pool.metadata.setdefault(
|
% (metadata["name"], metadata["version"], metadata["release"], arch)
|
||||||
variant.uid, {}).setdefault(arch, []).append(data)
|
)
|
||||||
|
self.pool.metadata.setdefault(variant.uid, {}).setdefault(
|
||||||
|
arch, []
|
||||||
|
).append(data)
|
||||||
return nvr
|
return nvr
|
||||||
|
|
||||||
def _get_repo(self, compose, repo, gpgkey=None):
|
def _get_repo(self, compose, repo, gpgkey=None):
|
||||||
@@ -201,17 +219,17 @@ class OSBSThread(WorkerThread):

         repo_file = os.path.join(
             compose.paths.work.tmp_dir(None, variant),
-            'compose-rpms-%s-%s.repo' % (variant, self.num),
+            "compose-rpms-%s-%s.repo" % (variant, self.num),
         )

         gpgcheck = 1 if gpgkey else 0
-        with open(repo_file, 'w') as f:
-            f.write('[%s-%s-%s]\n' % (compose.compose_id, variant, self.num))
-            f.write('name=Compose %s (RPMs) - %s\n' % (compose.compose_id, variant))
-            f.write('baseurl=%s\n' % util.translate_path(compose, repo_path))
-            f.write('enabled=1\n')
-            f.write('gpgcheck=%s\n' % gpgcheck)
+        with open(repo_file, "w") as f:
+            f.write("[%s-%s-%s]\n" % (compose.compose_id, variant, self.num))
+            f.write("name=Compose %s (RPMs) - %s\n" % (compose.compose_id, variant))
+            f.write("baseurl=%s\n" % util.translate_path(compose, repo_path))
+            f.write("enabled=1\n")
+            f.write("gpgcheck=%s\n" % gpgcheck)
             if gpgcheck:
-                f.write('gpgkey=%s\n' % gpgkey)
+                f.write("gpgkey=%s\n" % gpgkey)

         return util.translate_path(compose, repo_file)
@@ -16,7 +16,7 @@ from ..wrappers import scm


 class OSTreePhase(ConfigGuardedPhase):
-    name = 'ostree'
+    name = "ostree"

     def __init__(self, compose, pkgset_phase=None):
         super(OSTreePhase, self).__init__(compose)
@@ -40,7 +40,7 @@ class OSTreePhase(ConfigGuardedPhase):
         if isinstance(self.compose.conf.get(self.name), dict):
             for variant in self.compose.get_variants():
                 for conf in self.get_config_block(variant):
-                    for arch in conf.get('arches', []) or variant.arches:
+                    for arch in conf.get("arches", []) or variant.arches:
                         self._enqueue(variant, arch, conf)
         else:
             # Legacy code path to support original configuration.
@@ -60,22 +60,31 @@ class OSTreeThread(WorkerThread):
     def process(self, item, num):
         compose, variant, arch, config = item
         self.num = num
-        failable_arches = config.get('failable', [])
-        with util.failable(compose, util.can_arch_fail(failable_arches, arch),
-                           variant, arch, 'ostree'):
+        failable_arches = config.get("failable", [])
+        with util.failable(
+            compose, util.can_arch_fail(failable_arches, arch), variant, arch, "ostree"
+        ):
             self.worker(compose, variant, arch, config)

     def worker(self, compose, variant, arch, config):
-        msg = 'OSTree phase for variant %s, arch %s' % (variant.uid, arch)
-        self.pool.log_info('[BEGIN] %s' % msg)
-        workdir = compose.paths.work.topdir('ostree-%d' % self.num)
-        self.logdir = compose.paths.log.topdir('%s/%s/ostree-%d' %
-                                               (arch, variant.uid, self.num))
-        repodir = os.path.join(workdir, 'config_repo')
-        self._clone_repo(compose, repodir, config['config_url'], config.get('config_branch', 'master'))
+        msg = "OSTree phase for variant %s, arch %s" % (variant.uid, arch)
+        self.pool.log_info("[BEGIN] %s" % msg)
+        workdir = compose.paths.work.topdir("ostree-%d" % self.num)
+        self.logdir = compose.paths.log.topdir(
+            "%s/%s/ostree-%d" % (arch, variant.uid, self.num)
+        )
+        repodir = os.path.join(workdir, "config_repo")
+        self._clone_repo(
+            compose,
+            repodir,
+            config["config_url"],
+            config.get("config_branch", "master"),
+        )

-        comps_repo = compose.paths.work.comps_repo('$basearch', variant=variant, create_dir=False)
-        repos = shortcuts.force_list(config['repo']) + self.repos
+        comps_repo = compose.paths.work.comps_repo(
+            "$basearch", variant=variant, create_dir=False
+        )
+        repos = shortcuts.force_list(config["repo"]) + self.repos
         if compose.has_comps:
             repos.append(translate_path(compose, comps_repo))
         repos = get_repo_dicts(repos, logger=self.pool)
@ -85,27 +94,35 @@ class OSTreeThread(WorkerThread):
|
|||||||
|
|
||||||
# repos in configuration can have repo url set to variant UID,
|
# repos in configuration can have repo url set to variant UID,
|
||||||
# update it to have the actual url that we just translated.
|
# update it to have the actual url that we just translated.
|
||||||
new_config.update({'repo': repos})
|
new_config.update({"repo": repos})
|
||||||
|
|
||||||
# remove unnecessary (for 'pungi-make-ostree tree' script ) elements
|
# remove unnecessary (for 'pungi-make-ostree tree' script ) elements
|
||||||
# from config, it doesn't hurt to have them, however remove them can
|
# from config, it doesn't hurt to have them, however remove them can
|
||||||
# reduce confusion
|
# reduce confusion
|
||||||
for k in ['ostree_repo', 'treefile', 'config_url', 'config_branch',
|
for k in [
|
||||||
'failable', 'version', 'update_summary']:
|
"ostree_repo",
|
||||||
|
"treefile",
|
||||||
|
"config_url",
|
||||||
|
"config_branch",
|
||||||
|
"failable",
|
||||||
|
"version",
|
||||||
|
"update_summary",
|
||||||
|
]:
|
||||||
new_config.pop(k, None)
|
new_config.pop(k, None)
|
||||||
|
|
||||||
# write a json file to save the configuration, so 'pungi-make-ostree tree'
|
# write a json file to save the configuration, so 'pungi-make-ostree tree'
|
||||||
# can take use of it
|
# can take use of it
|
||||||
extra_config_file = os.path.join(workdir, 'extra_config.json')
|
extra_config_file = os.path.join(workdir, "extra_config.json")
|
||||||
with open(extra_config_file, 'w') as f:
|
with open(extra_config_file, "w") as f:
|
||||||
json.dump(new_config, f, indent=4)
|
json.dump(new_config, f, indent=4)
|
||||||
|
|
||||||
# Ensure target directory exists, otherwise Koji task will fail to
|
# Ensure target directory exists, otherwise Koji task will fail to
|
||||||
# mount it.
|
# mount it.
|
||||||
util.makedirs(config['ostree_repo'])
|
util.makedirs(config["ostree_repo"])
|
||||||
|
|
||||||
self._run_ostree_cmd(compose, variant, arch, config, repodir,
|
self._run_ostree_cmd(
|
||||||
extra_config_file=extra_config_file)
|
compose, variant, arch, config, repodir, extra_config_file=extra_config_file
|
||||||
|
)
|
||||||
|
|
||||||
if compose.notifier:
|
if compose.notifier:
|
||||||
original_ref = get_ref_from_treefile(
|
original_ref = get_ref_from_treefile(
|
||||||
@ -120,54 +137,66 @@ class OSTreeThread(WorkerThread):
|
|||||||
# instead. If the commit id could not be read, an exception will be
|
# instead. If the commit id could not be read, an exception will be
|
||||||
# raised.
|
# raised.
|
||||||
commitid = get_commitid_from_commitid_file(
|
commitid = get_commitid_from_commitid_file(
|
||||||
os.path.join(self.logdir, 'commitid.log')
|
os.path.join(self.logdir, "commitid.log")
|
||||||
)
|
)
|
||||||
compose.notifier.send('ostree',
|
compose.notifier.send(
|
||||||
|
"ostree",
|
||||||
variant=variant.uid,
|
variant=variant.uid,
|
||||||
arch=arch,
|
arch=arch,
|
||||||
ref=ref,
|
ref=ref,
|
||||||
commitid=commitid,
|
commitid=commitid,
|
||||||
repo_path=translate_path(compose, config['ostree_repo']),
|
repo_path=translate_path(compose, config["ostree_repo"]),
|
||||||
local_repo_path=config['ostree_repo'])
|
local_repo_path=config["ostree_repo"],
|
||||||
|
)
|
||||||
|
|
||||||
self.pool.log_info('[DONE ] %s' % (msg))
|
self.pool.log_info("[DONE ] %s" % (msg))
|
||||||
|
|
||||||
def _run_ostree_cmd(self, compose, variant, arch, config, config_repo, extra_config_file=None):
|
def _run_ostree_cmd(
|
||||||
|
self, compose, variant, arch, config, config_repo, extra_config_file=None
|
||||||
|
):
|
||||||
cmd = [
|
cmd = [
|
||||||
'pungi-make-ostree',
|
"pungi-make-ostree",
|
||||||
'tree',
|
"tree",
|
||||||
'--repo=%s' % config['ostree_repo'],
|
"--repo=%s" % config["ostree_repo"],
|
||||||
'--log-dir=%s' % self.logdir,
|
"--log-dir=%s" % self.logdir,
|
||||||
'--treefile=%s' % os.path.join(config_repo, config['treefile']),
|
"--treefile=%s" % os.path.join(config_repo, config["treefile"]),
|
||||||
]
|
]
|
||||||
|
|
||||||
version = util.version_generator(compose, config.get('version'))
|
version = util.version_generator(compose, config.get("version"))
|
||||||
if version:
|
if version:
|
||||||
cmd.append('--version=%s' % version)
|
cmd.append("--version=%s" % version)
|
||||||
|
|
||||||
if extra_config_file:
|
if extra_config_file:
|
||||||
cmd.append('--extra-config=%s' % extra_config_file)
|
cmd.append("--extra-config=%s" % extra_config_file)
|
||||||
|
|
||||||
if config.get('update_summary', False):
|
if config.get("update_summary", False):
|
||||||
cmd.append('--update-summary')
|
cmd.append("--update-summary")
|
||||||
|
|
||||||
ostree_ref = config.get('ostree_ref')
|
ostree_ref = config.get("ostree_ref")
|
||||||
if ostree_ref:
|
if ostree_ref:
|
||||||
cmd.append('--ostree-ref=%s' % ostree_ref)
|
cmd.append("--ostree-ref=%s" % ostree_ref)
|
||||||
|
|
||||||
if config.get('force_new_commit', False):
|
if config.get("force_new_commit", False):
|
||||||
cmd.append('--force-new-commit')
|
cmd.append("--force-new-commit")
|
||||||
|
|
||||||
packages = ['pungi', 'ostree', 'rpm-ostree']
|
packages = ["pungi", "ostree", "rpm-ostree"]
|
||||||
log_file = os.path.join(self.logdir, 'runroot.log')
|
log_file = os.path.join(self.logdir, "runroot.log")
|
||||||
mounts = [compose.topdir, config['ostree_repo']]
|
mounts = [compose.topdir, config["ostree_repo"]]
|
||||||
|
|
||||||
runroot = Runroot(compose, phase="ostree")
|
runroot = Runroot(compose, phase="ostree")
|
||||||
runroot.run(
|
runroot.run(
|
||||||
cmd, log_file=log_file, arch=arch, packages=packages,
|
cmd,
|
||||||
mounts=mounts, new_chroot=True,
|
log_file=log_file,
|
||||||
weight=compose.conf['runroot_weights'].get('ostree'))
|
arch=arch,
|
||||||
|
packages=packages,
|
||||||
|
mounts=mounts,
|
||||||
|
new_chroot=True,
|
||||||
|
weight=compose.conf["runroot_weights"].get("ostree"),
|
||||||
|
)
|
||||||
|
|
||||||
def _clone_repo(self, compose, repodir, url, branch):
|
def _clone_repo(self, compose, repodir, url, branch):
|
||||||
scm.get_dir_from_scm({'scm': 'git', 'repo': url, 'branch': branch, 'dir': '.'},
|
scm.get_dir_from_scm(
|
||||||
repodir, compose=compose)
|
{"scm": "git", "repo": url, "branch": branch, "dir": "."},
|
||||||
|
repodir,
|
||||||
|
compose=compose,
|
||||||
|
)
|
||||||
|
@@ -16,7 +16,7 @@ from ..runroot import Runroot


 class OstreeInstallerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
-    name = 'ostree_installer'
+    name = "ostree_installer"

     def __init__(self, compose, buildinstall_phase, pkgset_phase=None):
         super(OstreeInstallerPhase, self).__init__(compose)
@@ -27,18 +27,21 @@ class OstreeInstallerPhase(PhaseLoggerMixin, ConfigGuardedPhase):
     def validate(self):
         errors = []

-        if not self.compose.conf['ostree_installer_overwrite'] and not self.bi.skip():
+        if not self.compose.conf["ostree_installer_overwrite"] and not self.bi.skip():
             for variant in self.compose.get_variants():
                 for arch in variant.arches:
-                    conf = util.get_arch_variant_data(self.compose.conf, self.name,
-                                                      arch, variant)
+                    conf = util.get_arch_variant_data(
+                        self.compose.conf, self.name, arch, variant
+                    )
                     if conf and not variant.is_empty:
-                        errors.append('Can not generate ostree installer for %s.%s: '
-                                      'it has buildinstall running already and the '
-                                      'files would clash.' % (variant.uid, arch))
+                        errors.append(
+                            "Can not generate ostree installer for %s.%s: "
+                            "it has buildinstall running already and the "
+                            "files would clash." % (variant.uid, arch)
+                        )

         if errors:
-            raise ValueError('\n'.join(errors))
+            raise ValueError("\n".join(errors))

     def get_repos(self):
         return [
@ -67,38 +70,53 @@ class OstreeInstallerThread(WorkerThread):
|
|||||||
def process(self, item, num):
|
def process(self, item, num):
|
||||||
compose, variant, arch, config = item
|
compose, variant, arch, config = item
|
||||||
self.num = num
|
self.num = num
|
||||||
failable_arches = config.get('failable', [])
|
failable_arches = config.get("failable", [])
|
||||||
self.can_fail = util.can_arch_fail(failable_arches, arch)
|
self.can_fail = util.can_arch_fail(failable_arches, arch)
|
||||||
with util.failable(compose, self.can_fail, variant, arch, 'ostree-installer',
|
with util.failable(
|
||||||
logger=self.pool._logger):
|
compose,
|
||||||
|
self.can_fail,
|
||||||
|
variant,
|
||||||
|
arch,
|
||||||
|
"ostree-installer",
|
||||||
|
logger=self.pool._logger,
|
||||||
|
):
|
||||||
self.worker(compose, variant, arch, config)
|
self.worker(compose, variant, arch, config)
|
||||||
|
|
||||||
def worker(self, compose, variant, arch, config):
|
def worker(self, compose, variant, arch, config):
|
||||||
msg = 'Ostree phase for variant %s, arch %s' % (variant.uid, arch)
|
msg = "Ostree phase for variant %s, arch %s" % (variant.uid, arch)
|
||||||
self.pool.log_info('[BEGIN] %s' % msg)
|
self.pool.log_info("[BEGIN] %s" % msg)
|
||||||
self.logdir = compose.paths.log.topdir('%s/%s/ostree_installer-%s' % (arch, variant, self.num))
|
self.logdir = compose.paths.log.topdir(
|
||||||
|
"%s/%s/ostree_installer-%s" % (arch, variant, self.num)
|
||||||
|
)
|
||||||
|
|
||||||
repos = get_repo_urls(None, # compose==None. Special value says that method should ignore deprecated variant-type repo
|
repos = get_repo_urls(
|
||||||
shortcuts.force_list(config['repo'])
|
None, # compose==None. Special value says that method should ignore deprecated variant-type repo
|
||||||
+ self.baseurls,
|
shortcuts.force_list(config["repo"]) + self.baseurls,
|
||||||
arch=arch,
|
arch=arch,
|
||||||
logger=self.pool)
|
logger=self.pool,
|
||||||
|
)
|
||||||
if compose.has_comps:
|
if compose.has_comps:
|
||||||
repos.append(
|
repos.append(
|
||||||
translate_path(
|
translate_path(
|
||||||
compose,
|
compose,
|
||||||
compose.paths.work.comps_repo(
|
compose.paths.work.comps_repo(
|
||||||
'$basearch', variant=variant, create_dir=False
|
"$basearch", variant=variant, create_dir=False
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
repos = [url.replace('$arch', arch) for url in repos]
|
repos = [url.replace("$arch", arch) for url in repos]
|
||||||
output_dir = os.path.join(compose.paths.work.topdir(arch), variant.uid, 'ostree_installer')
|
output_dir = os.path.join(
|
||||||
|
compose.paths.work.topdir(arch), variant.uid, "ostree_installer"
|
||||||
|
)
|
||||||
util.makedirs(os.path.dirname(output_dir))
|
util.makedirs(os.path.dirname(output_dir))
|
||||||
|
|
||||||
self.template_dir = os.path.join(compose.paths.work.topdir(arch), variant.uid, 'lorax_templates')
|
self.template_dir = os.path.join(
|
||||||
self._clone_templates(compose, config.get('template_repo'), config.get('template_branch'))
|
compose.paths.work.topdir(arch), variant.uid, "lorax_templates"
|
||||||
disc_type = compose.conf['disc_types'].get('ostree', 'ostree')
|
)
|
||||||
|
self._clone_templates(
|
||||||
|
compose, config.get("template_repo"), config.get("template_branch")
|
||||||
|
)
|
||||||
|
disc_type = compose.conf["disc_types"].get("ostree", "ostree")
|
||||||
|
|
||||||
volid = get_volid(compose, arch, variant, disc_type=disc_type)
|
volid = get_volid(compose, arch, variant, disc_type=disc_type)
|
||||||
self._run_ostree_cmd(compose, variant, arch, config, repos, output_dir, volid)
|
self._run_ostree_cmd(compose, variant, arch, config, repos, output_dir, volid)
|
||||||
@ -106,24 +124,29 @@ class OstreeInstallerThread(WorkerThread):
|
|||||||
filename = compose.get_image_name(arch, variant, disc_type=disc_type)
|
filename = compose.get_image_name(arch, variant, disc_type=disc_type)
|
||||||
self._copy_image(compose, variant, arch, filename, output_dir)
|
self._copy_image(compose, variant, arch, filename, output_dir)
|
||||||
self._add_to_manifest(compose, variant, arch, filename)
|
self._add_to_manifest(compose, variant, arch, filename)
|
||||||
self.pool.log_info('[DONE ] %s' % (msg))
|
self.pool.log_info("[DONE ] %s" % (msg))
|
||||||
|
|
||||||
def _clone_templates(self, compose, url, branch='master'):
|
def _clone_templates(self, compose, url, branch="master"):
|
||||||
if not url:
|
if not url:
|
||||||
self.template_dir = None
|
self.template_dir = None
|
||||||
return
|
return
|
||||||
scm.get_dir_from_scm({'scm': 'git', 'repo': url, 'branch': branch, 'dir': '.'},
|
scm.get_dir_from_scm(
|
||||||
self.template_dir, compose=compose)
|
{"scm": "git", "repo": url, "branch": branch, "dir": "."},
|
||||||
|
self.template_dir,
|
||||||
|
compose=compose,
|
||||||
|
)
|
||||||
|
|
||||||
def _get_release(self, compose, config):
|
def _get_release(self, compose, config):
|
||||||
if 'release' in config:
|
if "release" in config:
|
||||||
return version_generator(compose, config['release']) or compose.image_release
|
return (
|
||||||
return config.get('release', None)
|
version_generator(compose, config["release"]) or compose.image_release
|
||||||
|
)
|
||||||
|
return config.get("release", None)
|
||||||
|
|
||||||
def _copy_image(self, compose, variant, arch, filename, output_dir):
|
def _copy_image(self, compose, variant, arch, filename, output_dir):
|
||||||
iso_path = compose.paths.compose.iso_path(arch, variant, filename)
|
iso_path = compose.paths.compose.iso_path(arch, variant, filename)
|
||||||
os_path = compose.paths.compose.os_tree(arch, variant)
|
os_path = compose.paths.compose.os_tree(arch, variant)
|
||||||
boot_iso = os.path.join(output_dir, 'images', 'boot.iso')
|
boot_iso = os.path.join(output_dir, "images", "boot.iso")
|
||||||
|
|
||||||
util.copy_all(output_dir, os_path)
|
util.copy_all(output_dir, os_path)
|
||||||
try:
|
try:
|
||||||
@@ -133,7 +156,9 @@ class OstreeInstallerThread(WorkerThread):

     def _add_to_manifest(self, compose, variant, arch, filename):
         full_iso_path = compose.paths.compose.iso_path(arch, variant, filename)
-        iso_path = compose.paths.compose.iso_path(arch, variant, filename, relative=True)
+        iso_path = compose.paths.compose.iso_path(
+            arch, variant, filename, relative=True
+        )
         implant_md5 = iso.get_implanted_md5(full_iso_path)

         img = images.Image(compose.im)
@ -148,8 +173,8 @@ class OstreeInstallerThread(WorkerThread):
|
|||||||
img.bootable = True
|
img.bootable = True
|
||||||
img.subvariant = variant.uid
|
img.subvariant = variant.uid
|
||||||
img.implant_md5 = implant_md5
|
img.implant_md5 = implant_md5
|
||||||
setattr(img, 'can_fail', self.can_fail)
|
setattr(img, "can_fail", self.can_fail)
|
||||||
setattr(img, 'deliverable', 'ostree-installer')
|
setattr(img, "deliverable", "ostree-installer")
|
||||||
try:
|
try:
|
||||||
img.volume_id = iso.get_volume_id(full_iso_path)
|
img.volume_id = iso.get_volume_id(full_iso_path)
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
@ -163,17 +188,21 @@ class OstreeInstallerThread(WorkerThread):
        """
        templates = []
        for template in config.get(key, []):
            if template[0] != '/':
            if template[0] != "/":
                if not self.template_dir:
                    raise RuntimeError('Relative path to template without setting template_repo.')
                    raise RuntimeError(
                        "Relative path to template without setting template_repo."
                    )
                template = os.path.join(self.template_dir, template)
            templates.append(template)
        return templates

    def _run_ostree_cmd(self, compose, variant, arch, config, source_repo, output_dir, volid):
    def _run_ostree_cmd(
        self, compose, variant, arch, config, source_repo, output_dir, volid
    ):
        lorax_wrapper = lorax.LoraxWrapper()
        lorax_cmd = lorax_wrapper.get_lorax_cmd(
            compose.conf['release_name'],
            compose.conf["release_name"],
            compose.conf["release_version"],
            self._get_release(compose, config),
            repo_baseurl=source_repo,
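_get_templates (shown above) keeps absolute template paths as they are and joins relative ones onto the cloned template_repo checkout, refusing a relative path when no repository was configured. A minimal standalone sketch of that decision, with invented directory and file names:

import os

def resolve_templates(template_dir, templates):
    # template_dir may be None when no template_repo was configured.
    resolved = []
    for template in templates:
        if template[0] != "/":
            if not template_dir:
                raise RuntimeError(
                    "Relative path to template without setting template_repo."
                )
            template = os.path.join(template_dir, template)
        resolved.append(template)
    return resolved

print(resolve_templates(
    "/work/x86_64/Server/lorax_templates",
    ["ostree.tmpl", "/usr/share/lorax/live.tmpl"],
))
# ['/work/x86_64/Server/lorax_templates/ostree.tmpl', '/usr/share/lorax/live.tmpl']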
@ -182,25 +211,32 @@ class OstreeInstallerThread(WorkerThread):
            nomacboot=True,
            volid=volid,
            buildarch=get_valid_arches(arch)[0],
            buildinstallpackages=config.get('installpkgs'),
            buildinstallpackages=config.get("installpkgs"),
            add_template=self._get_templates(config, 'add_template'),
            add_template=self._get_templates(config, "add_template"),
            add_arch_template=self._get_templates(config, 'add_arch_template'),
            add_arch_template=self._get_templates(config, "add_arch_template"),
            add_template_var=config.get('add_template_var'),
            add_template_var=config.get("add_template_var"),
            add_arch_template_var=config.get('add_arch_template_var'),
            add_arch_template_var=config.get("add_arch_template_var"),
            rootfs_size=config.get('rootfs_size'),
            rootfs_size=config.get("rootfs_size"),
            is_final=compose.supported,
            log_dir=self.logdir,
        )
        cmd = 'rm -rf %s && %s' % (shlex_quote(output_dir),
                                   ' '.join([shlex_quote(x) for x in lorax_cmd]))
        cmd = "rm -rf %s && %s" % (
            shlex_quote(output_dir),
            " ".join([shlex_quote(x) for x in lorax_cmd]),
        )

        packages = ['pungi', 'lorax', 'ostree']
        packages = ["pungi", "lorax", "ostree"]
        packages += config.get('extra_runroot_pkgs', [])
        packages += config.get("extra_runroot_pkgs", [])

        log_file = os.path.join(self.logdir, 'runroot.log')
        log_file = os.path.join(self.logdir, "runroot.log")

        runroot = Runroot(compose, phase="ostree_installer")
        runroot.run(
            cmd, log_file=log_file, arch=arch, packages=packages,
            mounts=[compose.topdir], chown_paths=[output_dir],
            weight=compose.conf['runroot_weights'].get('ostree_installer'))
            cmd,
            log_file=log_file,
            arch=arch,
            packages=packages,
            mounts=[compose.topdir],
            chown_paths=[output_dir],
            weight=compose.conf["runroot_weights"].get("ostree_installer"),
        )
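The runroot command above is passed to the remote side as a single shell string, so the output directory and every lorax argument are quoted before being joined. A small sketch of the same quoting pattern, using the standard library's shlex.quote in place of the shlex_quote helper; the path and lorax arguments are made up for the example:

import shlex

output_dir = "/work/x86_64/Server/ostree_installer"  # hypothetical path
lorax_cmd = ["lorax", "--product", "Fedora Server", "--volid", "F-31 x86_64"]

cmd = "rm -rf %s && %s" % (
    shlex.quote(output_dir),
    " ".join(shlex.quote(x) for x in lorax_cmd),
)
print(cmd)
# rm -rf /work/x86_64/Server/ostree_installer && lorax --product 'Fedora Server' --volid 'F-31 x86_64'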
@ -14,7 +14,9 @@ def gather_phases_metadata(source_object):
    """

    if not source_object:
        raise ValueError("PhasesMetadata can not load any data - it got empty parameter")
        raise ValueError(
            "PhasesMetadata can not load any data - it got empty parameter"
        )

    phases = []
    for item in dir(source_object):
@ -23,9 +25,11 @@ def gather_phases_metadata(source_object):
            continue
        if issubclass(cls, PhaseBase):
            try:
                name_attr = getattr(cls, 'name')
                name_attr = getattr(cls, "name")
                phases.append(name_attr)
            except AttributeError:
                raise AttributeError("Bad phase-class format: '%s' is missing attribute 'name'" % item)
                raise AttributeError(
                    "Bad phase-class format: '%s' is missing attribute 'name'" % item
                )

    return phases
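gather_phases_metadata (above) walks the attributes of a module-like object and collects the `name` of every PhaseBase subclass, raising AttributeError for a phase class that forgot to define one. A rough standalone sketch of that introspection pattern; the phase classes here are invented for the example:

import sys

class PhaseBase(object):
    pass

class InitPhase(PhaseBase):
    name = "init"

class PkgsetPhase(PhaseBase):
    name = "pkgset"

def gather_phase_names(source):
    phases = []
    for item in dir(source):
        cls = getattr(source, item)
        if not isinstance(cls, type) or not issubclass(cls, PhaseBase):
            continue
        if cls is PhaseBase:
            continue
        try:
            phases.append(getattr(cls, "name"))
        except AttributeError:
            raise AttributeError(
                "Bad phase-class format: '%s' is missing attribute 'name'" % item
            )
    return phases

print(gather_phase_names(sys.modules[__name__]))  # ['init', 'pkgset']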
@ -19,6 +19,7 @@ from pungi.phases.base import PhaseBase

class PkgsetPhase(PhaseBase):
    """PKGSET"""

    name = "pkgset"

    def __init__(self, *args, **kwargs):
@ -30,6 +31,7 @@ class PkgsetPhase(PhaseBase):
        pkgset_source = "PkgsetSource%s" % self.compose.conf["pkgset_source"]
        from .source import PkgsetSourceContainer
        from . import sources

        PkgsetSourceContainer.register_module(sources)
        container = PkgsetSourceContainer()
        SourceClass = container[pkgset_source]
@ -45,8 +45,9 @@ class ReaderThread(WorkerThread):
|
|||||||
# rpm_info, build_info = item
|
# rpm_info, build_info = item
|
||||||
|
|
||||||
if (num % 100 == 0) or (num == self.pool.queue_total):
|
if (num % 100 == 0) or (num == self.pool.queue_total):
|
||||||
self.pool.package_set.log_debug("Processed %s out of %s packages"
|
self.pool.package_set.log_debug(
|
||||||
% (num, self.pool.queue_total))
|
"Processed %s out of %s packages" % (num, self.pool.queue_total)
|
||||||
|
)
|
||||||
|
|
||||||
rpm_path = self.pool.package_set.get_package_path(item)
|
rpm_path = self.pool.package_set.get_package_path(item)
|
||||||
if rpm_path is None:
|
if rpm_path is None:
|
||||||
@ -79,9 +80,14 @@ class ReaderThread(WorkerThread):
|
|||||||
|
|
||||||
|
|
||||||
class PackageSetBase(kobo.log.LoggingBase):
|
class PackageSetBase(kobo.log.LoggingBase):
|
||||||
|
def __init__(
|
||||||
def __init__(self, name, sigkey_ordering, arches=None, logger=None,
|
self,
|
||||||
allow_invalid_sigkeys=False):
|
name,
|
||||||
|
sigkey_ordering,
|
||||||
|
arches=None,
|
||||||
|
logger=None,
|
||||||
|
allow_invalid_sigkeys=False,
|
||||||
|
):
|
||||||
super(PackageSetBase, self).__init__(logger=logger)
|
super(PackageSetBase, self).__init__(logger=logger)
|
||||||
self.name = name
|
self.name = name
|
||||||
self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
|
self.file_cache = kobo.pkgset.FileCache(kobo.pkgset.SimpleRpmWrapper)
|
||||||
@ -122,14 +128,20 @@ class PackageSetBase(kobo.log.LoggingBase):
        Raises RuntimeError containing details of RPMs with invalid
        sigkeys defined in `rpminfos`.
        """

        def nvr_formatter(package_info):
            # joins NVR parts of the package with '-' character.
            return '-'.join((package_info['name'], package_info['version'], package_info['release']))
            return "-".join(
                (package_info["name"], package_info["version"], package_info["release"])
            )

        def get_error(sigkeys, infos):
            return "RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s" % (
            return (
                "RPM(s) not found for sigs: %s. Check log for details. Unsigned packages:\n%s"
                % (
                    sigkeys,
                    '\n'.join(sorted(set(nvr_formatter(rpminfo) for rpminfo in infos))),
                    "\n".join(sorted(set(nvr_formatter(rpminfo) for rpminfo in infos))),
                )
            )

        if not isinstance(rpminfos, dict):
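nvr_formatter only glues the name, version and release fields back into an NVR string for the error report, and get_error lists one such NVR per offending package. For example, with a dict shaped like a Koji rpm info record (values invented):

def nvr_formatter(package_info):
    return "-".join(
        (package_info["name"], package_info["version"], package_info["release"])
    )

rpminfo = {"name": "bash", "version": "5.0.7", "release": "1.fc31", "arch": "x86_64"}
print(nvr_formatter(rpminfo))  # bash-5.0.7-1.fc31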
@ -198,14 +210,15 @@ class PackageSetBase(kobo.log.LoggingBase):
|
|||||||
# arches (excluding multilib arches)
|
# arches (excluding multilib arches)
|
||||||
if primary_arch:
|
if primary_arch:
|
||||||
exclusivearch_list = get_valid_arches(
|
exclusivearch_list = get_valid_arches(
|
||||||
primary_arch, multilib=False, add_noarch=False, add_src=False)
|
primary_arch, multilib=False, add_noarch=False, add_src=False
|
||||||
|
)
|
||||||
# We don't want to consider noarch: if a package is true noarch
|
# We don't want to consider noarch: if a package is true noarch
|
||||||
# build (not just a subpackage), it has to have noarch in
|
# build (not just a subpackage), it has to have noarch in
|
||||||
# ExclusiveArch otherwise rpm will refuse to build it.
|
# ExclusiveArch otherwise rpm will refuse to build it.
|
||||||
# This should eventually become a default, but it could have a big
|
# This should eventually become a default, but it could have a big
|
||||||
# impact and thus it's hidden behind an option.
|
# impact and thus it's hidden behind an option.
|
||||||
if not exclusive_noarch and 'noarch' in exclusivearch_list:
|
if not exclusive_noarch and "noarch" in exclusivearch_list:
|
||||||
exclusivearch_list.remove('noarch')
|
exclusivearch_list.remove("noarch")
|
||||||
else:
|
else:
|
||||||
exclusivearch_list = None
|
exclusivearch_list = None
|
||||||
for arch in arch_list:
|
for arch in arch_list:
|
||||||
@ -237,7 +250,7 @@ class PackageSetBase(kobo.log.LoggingBase):
|
|||||||
for i in self.rpms_by_arch[arch]:
|
for i in self.rpms_by_arch[arch]:
|
||||||
rpm_path = i.file_path
|
rpm_path = i.file_path
|
||||||
if remove_path_prefix and rpm_path.startswith(remove_path_prefix):
|
if remove_path_prefix and rpm_path.startswith(remove_path_prefix):
|
||||||
rpm_path = rpm_path[len(remove_path_prefix):]
|
rpm_path = rpm_path[len(remove_path_prefix) :]
|
||||||
f.write("%s\n" % rpm_path)
|
f.write("%s\n" % rpm_path)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@ -256,7 +269,7 @@ class PackageSetBase(kobo.log.LoggingBase):
        """
        Saves the current FileCache using the pickle module to `file_path`.
        """
        with open(file_path, 'wb') as f:
        with open(file_path, "wb") as f:
            pickle.dump(self.file_cache, f, protocol=pickle.HIGHEST_PROTOCOL)
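The method above just pickles the kobo FileCache to a file so that a later compose can reload it instead of re-reading every RPM header. A minimal sketch of the same dump/load round trip, with a plain dict standing in for the FileCache and an invented path as the key:

import pickle
import tempfile

file_cache = {
    "/mnt/koji/packages/bash/5.0/1.fc31/x86_64/bash-5.0-1.fc31.x86_64.rpm": {
        "sigkey": "cfc659b9"  # made-up value
    },
}

with tempfile.NamedTemporaryFile(suffix=".pickle", delete=False) as f:
    pickle.dump(file_cache, f, protocol=pickle.HIGHEST_PROTOCOL)

with open(f.name, "rb") as f2:
    assert pickle.load(f2) == file_cache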
@ -282,10 +295,19 @@ class FilelistPackageSet(PackageSetBase):
|
|||||||
|
|
||||||
|
|
||||||
class KojiPackageSet(PackageSetBase):
|
class KojiPackageSet(PackageSetBase):
|
||||||
def __init__(self, name, koji_wrapper, sigkey_ordering, arches=None, logger=None,
|
def __init__(
|
||||||
packages=None, allow_invalid_sigkeys=False,
|
self,
|
||||||
populate_only_packages=False, cache_region=None,
|
name,
|
||||||
extra_builds=None):
|
koji_wrapper,
|
||||||
|
sigkey_ordering,
|
||||||
|
arches=None,
|
||||||
|
logger=None,
|
||||||
|
packages=None,
|
||||||
|
allow_invalid_sigkeys=False,
|
||||||
|
populate_only_packages=False,
|
||||||
|
cache_region=None,
|
||||||
|
extra_builds=None,
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
Creates new KojiPackageSet.
|
Creates new KojiPackageSet.
|
||||||
|
|
||||||
@ -320,7 +342,7 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
sigkey_ordering=sigkey_ordering,
|
sigkey_ordering=sigkey_ordering,
|
||||||
arches=arches,
|
arches=arches,
|
||||||
logger=logger,
|
logger=logger,
|
||||||
allow_invalid_sigkeys=allow_invalid_sigkeys
|
allow_invalid_sigkeys=allow_invalid_sigkeys,
|
||||||
)
|
)
|
||||||
self.koji_wrapper = koji_wrapper
|
self.koji_wrapper = koji_wrapper
|
||||||
# Names of packages to look for in the Koji tag.
|
# Names of packages to look for in the Koji tag.
|
||||||
@ -356,9 +378,13 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
builds = []
|
builds = []
|
||||||
|
|
||||||
builds = self.koji_wrapper.retrying_multicall_map(
|
builds = self.koji_wrapper.retrying_multicall_map(
|
||||||
self.koji_proxy, self.koji_proxy.getBuild, list_of_args=self.extra_builds)
|
self.koji_proxy, self.koji_proxy.getBuild, list_of_args=self.extra_builds
|
||||||
|
)
|
||||||
rpms_in_builds = self.koji_wrapper.retrying_multicall_map(
|
rpms_in_builds = self.koji_wrapper.retrying_multicall_map(
|
||||||
self.koji_proxy, self.koji_proxy.listBuildRPMs, list_of_args=self.extra_builds)
|
self.koji_proxy,
|
||||||
|
self.koji_proxy.listBuildRPMs,
|
||||||
|
list_of_args=self.extra_builds,
|
||||||
|
)
|
||||||
|
|
||||||
rpms = []
|
rpms = []
|
||||||
for rpms_in_build in rpms_in_builds:
|
for rpms_in_build in rpms_in_builds:
|
||||||
@ -371,18 +397,23 @@ class KojiPackageSet(PackageSetBase):
        if self.cache_region:
            cache_key = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % (
                tag, str(event), str(inherit))
                tag,
                str(event),
                str(inherit),
            )
            cached_response = self.cache_region.get(cache_key)
            if cached_response:
                return cached_response
            else:
                response = self.koji_proxy.listTaggedRPMS(
                    tag, event=event, inherit=inherit, latest=True)
                    tag, event=event, inherit=inherit, latest=True
                )
                self.cache_region.set(cache_key, response)
                return response
        else:
            return self.koji_proxy.listTaggedRPMS(
                tag, event=event, inherit=inherit, latest=True)
                tag, event=event, inherit=inherit, latest=True
            )

    def get_package_path(self, queue_item):
        rpm_info, build_info = queue_item
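get_latest_rpms memoizes the listTaggedRPMS response in the configured cache region under a key derived from tag, event and inherit, so repeated queries for the same tag within one compose hit Koji only once. A rough sketch of that pattern, with a dict-backed stand-in for the cache region and a stubbed Koji call:

calls = []

class FakeRegion(object):
    # Dict-backed stand-in for the cache region; only get/set are needed here.
    def __init__(self):
        self._data = {}

    def get(self, key):
        return self._data.get(key)

    def set(self, key, value):
        self._data[key] = value

def list_tagged_rpms(tag, event=None, inherit=True, latest=True):
    # Stub for koji_proxy.listTaggedRPMS; records how often it is called.
    calls.append(tag)
    return ([{"name": "bash"}], [{"build_id": 1}])

def get_latest_rpms(region, tag, event, inherit=True):
    cache_key = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % (tag, str(event), str(inherit))
    cached = region.get(cache_key)
    if cached:
        return cached
    response = list_tagged_rpms(tag, event=event, inherit=inherit, latest=True)
    region.set(cache_key, response)
    return response

region = FakeRegion()
get_latest_rpms(region, "f31-compose", 12345)
get_latest_rpms(region, "f31-compose", 12345)
print(len(calls))  # 1 - the second lookup was served from the cache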
@ -393,12 +424,14 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
# we're looking for *signed* copies here
|
# we're looking for *signed* copies here
|
||||||
continue
|
continue
|
||||||
sigkey = sigkey.lower()
|
sigkey = sigkey.lower()
|
||||||
rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey))
|
rpm_path = os.path.join(
|
||||||
|
pathinfo.build(build_info), pathinfo.signed(rpm_info, sigkey)
|
||||||
|
)
|
||||||
paths.append(rpm_path)
|
paths.append(rpm_path)
|
||||||
if os.path.isfile(rpm_path):
|
if os.path.isfile(rpm_path):
|
||||||
return rpm_path
|
return rpm_path
|
||||||
|
|
||||||
if None in self.sigkey_ordering or '' in self.sigkey_ordering:
|
if None in self.sigkey_ordering or "" in self.sigkey_ordering:
|
||||||
# use an unsigned copy (if allowed)
|
# use an unsigned copy (if allowed)
|
||||||
rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
|
rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
|
||||||
paths.append(rpm_path)
|
paths.append(rpm_path)
|
||||||
@ -414,8 +447,10 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
return rpm_path
|
return rpm_path
|
||||||
|
|
||||||
self._invalid_sigkey_rpms.append(rpm_info)
|
self._invalid_sigkey_rpms.append(rpm_info)
|
||||||
self.log_error("RPM %s not found for sigs: %s. Paths checked: %s"
|
self.log_error(
|
||||||
% (rpm_info, self.sigkey_ordering, paths))
|
"RPM %s not found for sigs: %s. Paths checked: %s"
|
||||||
|
% (rpm_info, self.sigkey_ordering, paths)
|
||||||
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def populate(self, tag, event=None, inherit=True, include_packages=None):
|
def populate(self, tag, event=None, inherit=True, include_packages=None):
|
||||||
@ -433,7 +468,11 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
if type(event) is dict:
|
if type(event) is dict:
|
||||||
event = event["id"]
|
event = event["id"]
|
||||||
|
|
||||||
msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (tag, event, inherit)
|
msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (
|
||||||
|
tag,
|
||||||
|
event,
|
||||||
|
inherit,
|
||||||
|
)
|
||||||
self.log_info("[BEGIN] %s" % msg)
|
self.log_info("[BEGIN] %s" % msg)
|
||||||
rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
|
rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
|
||||||
extra_rpms, extra_builds = self.get_extra_rpms()
|
extra_rpms, extra_builds = self.get_extra_rpms()
|
||||||
@ -442,13 +481,16 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
|
|
||||||
extra_builds_by_name = {}
|
extra_builds_by_name = {}
|
||||||
for build_info in extra_builds:
|
for build_info in extra_builds:
|
||||||
extra_builds_by_name[build_info['name']] = build_info['build_id']
|
extra_builds_by_name[build_info["name"]] = build_info["build_id"]
|
||||||
|
|
||||||
builds_by_id = {}
|
builds_by_id = {}
|
||||||
exclude_build_id = []
|
exclude_build_id = []
|
||||||
for build_info in builds:
|
for build_info in builds:
|
||||||
build_id, build_name = build_info['build_id'], build_info['name']
|
build_id, build_name = build_info["build_id"], build_info["name"]
|
||||||
if build_name in extra_builds_by_name and build_id != extra_builds_by_name[build_name]:
|
if (
|
||||||
|
build_name in extra_builds_by_name
|
||||||
|
and build_id != extra_builds_by_name[build_name]
|
||||||
|
):
|
||||||
exclude_build_id.append(build_id)
|
exclude_build_id.append(build_id)
|
||||||
else:
|
else:
|
||||||
builds_by_id.setdefault(build_id, build_info)
|
builds_by_id.setdefault(build_id, build_info)
|
||||||
@ -461,9 +503,11 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
# it would be missing from the package set. Even if it ultimately does
|
# it would be missing from the package set. Even if it ultimately does
|
||||||
# not end in the compose, we need it to extract ExcludeArch and
|
# not end in the compose, we need it to extract ExcludeArch and
|
||||||
# ExclusiveArch for noarch packages.
|
# ExclusiveArch for noarch packages.
|
||||||
for rpm_info in itertools.chain((rpm for rpm in rpms if not _is_src(rpm)),
|
for rpm_info in itertools.chain(
|
||||||
(rpm for rpm in rpms if _is_src(rpm))):
|
(rpm for rpm in rpms if not _is_src(rpm)),
|
||||||
if rpm_info['build_id'] in exclude_build_id:
|
(rpm for rpm in rpms if _is_src(rpm)),
|
||||||
|
):
|
||||||
|
if rpm_info["build_id"] in exclude_build_id:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if self.arches and rpm_info["arch"] not in self.arches:
|
if self.arches and rpm_info["arch"] not in self.arches:
|
||||||
@ -482,8 +526,11 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if (self.populate_only_packages and self.packages and
|
if (
|
||||||
rpm_info['name'] not in self.packages):
|
self.populate_only_packages
|
||||||
|
and self.packages
|
||||||
|
and rpm_info["name"] not in self.packages
|
||||||
|
):
|
||||||
skipped_packages_count += 1
|
skipped_packages_count += 1
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@ -494,19 +541,22 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
result_rpms.append((rpm_info, build_info))
|
result_rpms.append((rpm_info, build_info))
|
||||||
if self.populate_only_packages and self.packages:
|
if self.populate_only_packages and self.packages:
|
||||||
# Only add the package if we already have some whitelist.
|
# Only add the package if we already have some whitelist.
|
||||||
self.packages.add(build_info['name'])
|
self.packages.add(build_info["name"])
|
||||||
|
|
||||||
if skipped_packages_count:
|
if skipped_packages_count:
|
||||||
self.log_debug("Skipped %d packages, not marked as to be "
|
self.log_debug(
|
||||||
"included in a compose." % skipped_packages_count)
|
"Skipped %d packages, not marked as to be "
|
||||||
|
"included in a compose." % skipped_packages_count
|
||||||
|
)
|
||||||
|
|
||||||
result = self.read_packages(result_rpms, result_srpms)
|
result = self.read_packages(result_rpms, result_srpms)
|
||||||
|
|
||||||
# Check that after reading the packages, every package that is
|
# Check that after reading the packages, every package that is
|
||||||
# included in a compose has the right sigkey.
|
# included in a compose has the right sigkey.
|
||||||
if self._invalid_sigkey_rpms:
|
if self._invalid_sigkey_rpms:
|
||||||
invalid_sigkey_rpms = [rpm for rpm in self._invalid_sigkey_rpms
|
invalid_sigkey_rpms = [
|
||||||
if rpm["name"] in self.packages]
|
rpm for rpm in self._invalid_sigkey_rpms if rpm["name"] in self.packages
|
||||||
|
]
|
||||||
if invalid_sigkey_rpms:
|
if invalid_sigkey_rpms:
|
||||||
self.raise_invalid_sigkeys_exception(invalid_sigkey_rpms)
|
self.raise_invalid_sigkeys_exception(invalid_sigkey_rpms)
|
||||||
|
|
||||||
@ -516,4 +566,4 @@ class KojiPackageSet(PackageSetBase):

def _is_src(rpm_info):
    """Check if rpm info object returned by Koji refers to source packages."""
    return rpm_info['arch'] in ('src', 'nosrc')
    return rpm_info["arch"] in ("src", "nosrc")
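_is_src is what lets the populate() loop earlier in this class iterate binary RPMs before src/nosrc ones. A tiny illustration with made-up rpm info dicts:

import itertools

def _is_src(rpm_info):
    return rpm_info["arch"] in ("src", "nosrc")

rpms = [
    {"name": "bash", "arch": "src"},
    {"name": "bash", "arch": "x86_64"},
    {"name": "glibc", "arch": "noarch"},
]
ordered = itertools.chain(
    (rpm for rpm in rpms if not _is_src(rpm)),
    (rpm for rpm in rpms if _is_src(rpm)),
)
print([(r["name"], r["arch"]) for r in ordered])
# [('bash', 'x86_64'), ('glibc', 'noarch'), ('bash', 'src')]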
@ -62,8 +62,9 @@ def variant_dict_from_str(compose, module_str):
        nsv = module_str.split(":")
        if len(nsv) > 4:
            raise ValueError(
                "Module string \"%s\" is not recognized. "
                'Module string "%s" is not recognized. '
                "Only NAME:STREAM[:VERSION[:CONTEXT]] is allowed.")
                "Only NAME:STREAM[:VERSION[:CONTEXT]] is allowed."
            )
        if len(nsv) > 3:
            module_info["context"] = nsv[3]
        if len(nsv) > 2:
@ -77,23 +78,24 @@ def variant_dict_from_str(compose, module_str):
        compose.log_warning(
            "Variant file uses old format of module definition with '-'"
            "delimiter, please switch to official format defined by "
            "Modules Naming Policy.")
            "Modules Naming Policy."
        )

        module_info = {}
        # The regex is matching a string which should represent the release number
        # of a module. The release number is in format: "%Y%m%d%H%M%S"
        release_regex = re.compile(r"^(\d){14}$")

        section_start = module_str.rfind('-')
        section_start = module_str.rfind("-")
        module_str_first_part = module_str[section_start+1:]
        module_str_first_part = module_str[section_start + 1 :]
        if release_regex.match(module_str_first_part):
            module_info['version'] = module_str_first_part
            module_info["version"] = module_str_first_part
            module_str = module_str[:section_start]
            section_start = module_str.rfind('-')
            section_start = module_str.rfind("-")
            module_info['stream'] = module_str[section_start+1:]
            module_info["stream"] = module_str[section_start + 1 :]
        else:
            module_info['stream'] = module_str_first_part
            module_info["stream"] = module_str_first_part
        module_info['name'] = module_str[:section_start]
        module_info["name"] = module_str[:section_start]

        return module_info
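The fallback branch above handles the old dash-delimited module names: a trailing 14-digit timestamp is taken as the version, the preceding dash-separated field as the stream, and whatever is left as the name. A small sketch of that split; the input strings are examples only:

import re

release_regex = re.compile(r"^(\d){14}$")

def parse_old_module_str(module_str):
    module_info = {}
    section_start = module_str.rfind("-")
    last = module_str[section_start + 1 :]
    if release_regex.match(last):
        module_info["version"] = last
        module_str = module_str[:section_start]
        section_start = module_str.rfind("-")
        module_info["stream"] = module_str[section_start + 1 :]
    else:
        module_info["stream"] = last
    module_info["name"] = module_str[:section_start]
    return module_info

print(parse_old_module_str("perl-5.24-20190510100000"))
# {'version': '20190510100000', 'stream': '5.24', 'name': 'perl'}
print(parse_old_module_str("nodejs-12"))
# {'stream': '12', 'name': 'nodejs'}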
@ -120,7 +122,7 @@ def get_koji_modules(compose, koji_wrapper, event, module_info_str):
|
|||||||
module_info.get("version", "*"),
|
module_info.get("version", "*"),
|
||||||
module_info.get("context", "*"),
|
module_info.get("context", "*"),
|
||||||
)
|
)
|
||||||
query_str = query_str.replace('*.*', '*')
|
query_str = query_str.replace("*.*", "*")
|
||||||
|
|
||||||
koji_builds = koji_proxy.search(query_str, "build", "glob")
|
koji_builds = koji_proxy.search(query_str, "build", "glob")
|
||||||
|
|
||||||
@ -149,7 +151,7 @@ def get_koji_modules(compose, koji_wrapper, event, module_info_str):
|
|||||||
except KeyError:
|
except KeyError:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if md['state'] == pungi.wrappers.kojiwrapper.KOJI_BUILD_DELETED:
|
if md["state"] == pungi.wrappers.kojiwrapper.KOJI_BUILD_DELETED:
|
||||||
compose.log_debug(
|
compose.log_debug(
|
||||||
"Module build %s has been deleted, ignoring it." % build["name"]
|
"Module build %s has been deleted, ignoring it." % build["name"]
|
||||||
)
|
)
|
||||||
@ -166,7 +168,7 @@ def get_koji_modules(compose, koji_wrapper, event, module_info_str):
|
|||||||
# If there is version provided, then all modules with that version will go
|
# If there is version provided, then all modules with that version will go
|
||||||
# in. In case version is missing, we will find the latest version and
|
# in. In case version is missing, we will find the latest version and
|
||||||
# include all modules with that version.
|
# include all modules with that version.
|
||||||
if not module_info.get('version'):
|
if not module_info.get("version"):
|
||||||
# select all found modules with latest version
|
# select all found modules with latest version
|
||||||
sorted_modules = sorted(
|
sorted_modules = sorted(
|
||||||
modules, key=lambda item: item["module_version"], reverse=True
|
modules, key=lambda item: item["module_version"], reverse=True
|
||||||
@ -188,7 +190,9 @@ class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
|
|||||||
self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)
|
self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(koji_profile)
|
||||||
# path prefix must contain trailing '/'
|
# path prefix must contain trailing '/'
|
||||||
path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
|
path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
|
||||||
package_sets = get_pkgset_from_koji(self.compose, self.koji_wrapper, path_prefix)
|
package_sets = get_pkgset_from_koji(
|
||||||
|
self.compose, self.koji_wrapper, path_prefix
|
||||||
|
)
|
||||||
return (package_sets, path_prefix)
|
return (package_sets, path_prefix)
|
||||||
|
|
||||||
|
|
||||||
@ -327,7 +331,10 @@ def _get_modules_from_koji(
|
|||||||
compose.log_info(
|
compose.log_info(
|
||||||
"Module '%s' in variant '%s' will use Koji tag '%s' "
|
"Module '%s' in variant '%s' will use Koji tag '%s' "
|
||||||
"(as a result of querying module '%s')",
|
"(as a result of querying module '%s')",
|
||||||
nsvc, variant, tag, module["name"]
|
nsvc,
|
||||||
|
variant,
|
||||||
|
tag,
|
||||||
|
module["name"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Store mapping NSVC --> koji_tag into variant. This is needed
|
# Store mapping NSVC --> koji_tag into variant. This is needed
|
||||||
@ -450,7 +457,8 @@ def _get_modules_from_koji_tags(
|
|||||||
# "release" in Koji build and with latest=True, Koji would return
|
# "release" in Koji build and with latest=True, Koji would return
|
||||||
# only builds with highest release.
|
# only builds with highest release.
|
||||||
module_builds = koji_proxy.listTagged(
|
module_builds = koji_proxy.listTagged(
|
||||||
tag, event=event_id["id"], inherit=True, type="module")
|
tag, event=event_id["id"], inherit=True, type="module"
|
||||||
|
)
|
||||||
|
|
||||||
# Filter out builds inherited from non-top tag
|
# Filter out builds inherited from non-top tag
|
||||||
module_builds = filter_inherited(koji_proxy, event_id, module_builds, tag)
|
module_builds = filter_inherited(koji_proxy, event_id, module_builds, tag)
|
||||||
@ -482,9 +490,11 @@ def _get_modules_from_koji_tags(
|
|||||||
latest_builds = []
|
latest_builds = []
|
||||||
module_builds = sorted(module_builds, key=_key, reverse=True)
|
module_builds = sorted(module_builds, key=_key, reverse=True)
|
||||||
for ns, ns_builds in groupby(
|
for ns, ns_builds in groupby(
|
||||||
module_builds, key=lambda x: ":".join([x["name"], x["version"]])):
|
module_builds, key=lambda x: ":".join([x["name"], x["version"]])
|
||||||
|
):
|
||||||
for nsv, nsv_builds in groupby(
|
for nsv, nsv_builds in groupby(
|
||||||
ns_builds, key=lambda x: x["release"].split(".")[0]):
|
ns_builds, key=lambda x: x["release"].split(".")[0]
|
||||||
|
):
|
||||||
latest_builds += list(nsv_builds)
|
latest_builds += list(nsv_builds)
|
||||||
break
|
break
|
||||||
|
|
||||||
@ -493,8 +503,12 @@ def _get_modules_from_koji_tags(
|
|||||||
for build in latest_builds:
|
for build in latest_builds:
|
||||||
# Get the Build from Koji to get modulemd and module_tag.
|
# Get the Build from Koji to get modulemd and module_tag.
|
||||||
build = koji_proxy.getBuild(build["build_id"])
|
build = koji_proxy.getBuild(build["build_id"])
|
||||||
module_tag = build.get("extra", {}).get("typeinfo", {}).get(
|
module_tag = (
|
||||||
"module", {}).get("content_koji_tag", "")
|
build.get("extra", {})
|
||||||
|
.get("typeinfo", {})
|
||||||
|
.get("module", {})
|
||||||
|
.get("content_koji_tag", "")
|
||||||
|
)
|
||||||
|
|
||||||
variant_tags[variant].append(module_tag)
|
variant_tags[variant].append(module_tag)
|
||||||
|
|
||||||
@ -516,7 +530,9 @@ def _get_modules_from_koji_tags(
|
|||||||
if tag_to_mmd[module_tag]:
|
if tag_to_mmd[module_tag]:
|
||||||
compose.log_info(
|
compose.log_info(
|
||||||
"Module %s in variant %s will use Koji tag %s.",
|
"Module %s in variant %s will use Koji tag %s.",
|
||||||
nsvc, variant, module_tag
|
nsvc,
|
||||||
|
variant,
|
||||||
|
module_tag,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Store mapping module-uid --> koji_tag into variant. This is
|
# Store mapping module-uid --> koji_tag into variant. This is
|
||||||
@ -543,14 +559,18 @@ def _find_old_file_cache_path(compose, tag_name):
|
|||||||
compose.ci_base.release.short,
|
compose.ci_base.release.short,
|
||||||
compose.ci_base.release.version,
|
compose.ci_base.release.version,
|
||||||
compose.ci_base.release.type_suffix,
|
compose.ci_base.release.type_suffix,
|
||||||
compose.ci_base.base_product.short if compose.ci_base.release.is_layered else None,
|
compose.ci_base.base_product.short
|
||||||
compose.ci_base.base_product.version if compose.ci_base.release.is_layered else None,
|
if compose.ci_base.release.is_layered
|
||||||
|
else None,
|
||||||
|
compose.ci_base.base_product.version
|
||||||
|
if compose.ci_base.release.is_layered
|
||||||
|
else None,
|
||||||
)
|
)
|
||||||
if not old_compose_path:
|
if not old_compose_path:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
old_file_cache_dir = compose.paths.work.pkgset_file_cache(tag_name)
|
old_file_cache_dir = compose.paths.work.pkgset_file_cache(tag_name)
|
||||||
rel_dir = relative_path(old_file_cache_dir, compose.topdir.rstrip('/') + '/')
|
rel_dir = relative_path(old_file_cache_dir, compose.topdir.rstrip("/") + "/")
|
||||||
old_file_cache_path = os.path.join(old_compose_path, rel_dir)
|
old_file_cache_path = os.path.join(old_compose_path, rel_dir)
|
||||||
if not os.path.exists(old_file_cache_path):
|
if not os.path.exists(old_file_cache_path):
|
||||||
return None
|
return None
|
||||||
@ -573,12 +593,15 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
|
|||||||
# here. This only works if we are not creating bootable images. Those could
|
# here. This only works if we are not creating bootable images. Those could
|
||||||
# include packages that are not in the compose.
|
# include packages that are not in the compose.
|
||||||
packages_to_gather, groups = get_packages_to_gather(
|
packages_to_gather, groups = get_packages_to_gather(
|
||||||
compose, include_arch=False, include_prepopulated=True)
|
compose, include_arch=False, include_prepopulated=True
|
||||||
|
)
|
||||||
if groups:
|
if groups:
|
||||||
comps = CompsWrapper(compose.paths.work.comps())
|
comps = CompsWrapper(compose.paths.work.comps())
|
||||||
for group in groups:
|
for group in groups:
|
||||||
packages_to_gather += comps.get_packages(group)
|
packages_to_gather += comps.get_packages(group)
|
||||||
if compose.conf["gather_method"] == "nodeps" and not compose.conf.get('buildinstall_method'):
|
if compose.conf["gather_method"] == "nodeps" and not compose.conf.get(
|
||||||
|
"buildinstall_method"
|
||||||
|
):
|
||||||
populate_only_packages_to_gather = True
|
populate_only_packages_to_gather = True
|
||||||
else:
|
else:
|
||||||
populate_only_packages_to_gather = False
|
populate_only_packages_to_gather = False
|
||||||
@ -605,9 +628,12 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
|
|||||||
raise ValueError(
|
raise ValueError(
|
||||||
"pygobject module or libmodulemd library is not installed, "
|
"pygobject module or libmodulemd library is not installed, "
|
||||||
"support for modules is disabled, but compose contains "
|
"support for modules is disabled, but compose contains "
|
||||||
"modules.")
|
"modules."
|
||||||
|
)
|
||||||
|
|
||||||
if modular_koji_tags or (compose.conf["pkgset_koji_module_tag"] and variant.modules):
|
if modular_koji_tags or (
|
||||||
|
compose.conf["pkgset_koji_module_tag"] and variant.modules
|
||||||
|
):
|
||||||
# List modules tagged in particular tags.
|
# List modules tagged in particular tags.
|
||||||
_get_modules_from_koji_tags(
|
_get_modules_from_koji_tags(
|
||||||
compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
|
compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd
|
||||||
@ -647,12 +673,16 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
|
|||||||
|
|
||||||
pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
|
pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
|
||||||
compose_tag,
|
compose_tag,
|
||||||
koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
|
koji_wrapper,
|
||||||
arches=all_arches, packages=packages_to_gather,
|
compose.conf["sigkeys"],
|
||||||
|
logger=compose._logger,
|
||||||
|
arches=all_arches,
|
||||||
|
packages=packages_to_gather,
|
||||||
allow_invalid_sigkeys=allow_invalid_sigkeys,
|
allow_invalid_sigkeys=allow_invalid_sigkeys,
|
||||||
populate_only_packages=populate_only_packages_to_gather,
|
populate_only_packages=populate_only_packages_to_gather,
|
||||||
cache_region=compose.cache_region,
|
cache_region=compose.cache_region,
|
||||||
extra_builds=extra_builds)
|
extra_builds=extra_builds,
|
||||||
|
)
|
||||||
|
|
||||||
# Check if we have cache for this tag from previous compose. If so, use
|
# Check if we have cache for this tag from previous compose. If so, use
|
||||||
# it.
|
# it.
|
||||||
@ -726,7 +756,9 @@ def get_koji_event_info(compose, koji_wrapper):
|
|||||||
compose.log_info("Getting koji event")
|
compose.log_info("Getting koji event")
|
||||||
result = get_koji_event_raw(koji_wrapper, compose.koji_event, event_file)
|
result = get_koji_event_raw(koji_wrapper, compose.koji_event, event_file)
|
||||||
if compose.koji_event:
|
if compose.koji_event:
|
||||||
compose.log_info("Setting koji event to a custom value: %s" % compose.koji_event)
|
compose.log_info(
|
||||||
|
"Setting koji event to a custom value: %s" % compose.koji_event
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
compose.log_info("Koji event: %s" % result["id"])
|
compose.log_info("Koji event: %s" % result["id"])
|
||||||
|
|
||||||
|
@ -47,7 +47,9 @@ def get_pkgset_from_repos(compose):
|
|||||||
|
|
||||||
pool = LinkerPool.with_workers(10, "hardlink-or-copy", logger=compose._logger)
|
pool = LinkerPool.with_workers(10, "hardlink-or-copy", logger=compose._logger)
|
||||||
|
|
||||||
path_prefix = os.path.join(compose.paths.work.topdir(arch="global"), "download") + "/"
|
path_prefix = (
|
||||||
|
os.path.join(compose.paths.work.topdir(arch="global"), "download") + "/"
|
||||||
|
)
|
||||||
makedirs(path_prefix)
|
makedirs(path_prefix)
|
||||||
|
|
||||||
seen_packages = set()
|
seen_packages = set()
|
||||||
@ -55,7 +57,8 @@ def get_pkgset_from_repos(compose):
|
|||||||
# write a pungi config for remote repos and a local comps repo
|
# write a pungi config for remote repos and a local comps repo
|
||||||
repos = {}
|
repos = {}
|
||||||
for num, repo in enumerate(
|
for num, repo in enumerate(
|
||||||
compose.conf["pkgset_repos"].get(arch, []) + compose.conf["pkgset_repos"].get("*", [])
|
compose.conf["pkgset_repos"].get(arch, [])
|
||||||
|
+ compose.conf["pkgset_repos"].get("*", [])
|
||||||
):
|
):
|
||||||
repo_path = repo
|
repo_path = repo
|
||||||
if "://" not in repo_path:
|
if "://" not in repo_path:
|
||||||
@ -74,16 +77,24 @@ def get_pkgset_from_repos(compose):
|
|||||||
pungi_dir = compose.paths.work.pungi_download_dir(arch)
|
pungi_dir = compose.paths.work.pungi_download_dir(arch)
|
||||||
|
|
||||||
backends = {
|
backends = {
|
||||||
'yum': pungi.get_pungi_cmd,
|
"yum": pungi.get_pungi_cmd,
|
||||||
'dnf': pungi.get_pungi_cmd_dnf,
|
"dnf": pungi.get_pungi_cmd_dnf,
|
||||||
}
|
}
|
||||||
get_cmd = backends[compose.conf['gather_backend']]
|
get_cmd = backends[compose.conf["gather_backend"]]
|
||||||
cmd = get_cmd(pungi_conf, destdir=pungi_dir, name="FOO",
|
cmd = get_cmd(
|
||||||
selfhosting=True, fulltree=True, multilib_methods=["all"],
|
pungi_conf,
|
||||||
nodownload=False, full_archlist=True, arch=arch,
|
destdir=pungi_dir,
|
||||||
|
name="FOO",
|
||||||
|
selfhosting=True,
|
||||||
|
fulltree=True,
|
||||||
|
multilib_methods=["all"],
|
||||||
|
nodownload=False,
|
||||||
|
full_archlist=True,
|
||||||
|
arch=arch,
|
||||||
cache_dir=compose.paths.work.pungi_cache_dir(arch=arch),
|
cache_dir=compose.paths.work.pungi_cache_dir(arch=arch),
|
||||||
profiler=profiler)
|
profiler=profiler,
|
||||||
if compose.conf['gather_backend'] == 'yum':
|
)
|
||||||
|
if compose.conf["gather_backend"] == "yum":
|
||||||
cmd.append("--force")
|
cmd.append("--force")
|
||||||
|
|
||||||
# TODO: runroot
|
# TODO: runroot
|
||||||
@ -127,7 +138,9 @@ def populate_global_pkgset(compose, file_list, path_prefix):
|
|||||||
return pkgset
|
return pkgset
|
||||||
|
|
||||||
|
|
||||||
def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, package_set=None):
|
def write_pungi_config(
|
||||||
|
compose, arch, variant, repos=None, comps_repo=None, package_set=None
|
||||||
|
):
|
||||||
"""write pungi config (kickstart) for arch/variant"""
|
"""write pungi config (kickstart) for arch/variant"""
|
||||||
pungi_wrapper = PungiWrapper()
|
pungi_wrapper = PungiWrapper()
|
||||||
pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)
|
pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)
|
||||||
@ -142,4 +155,12 @@ def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, pack
|
|||||||
packages.append("system-release")
|
packages.append("system-release")
|
||||||
|
|
||||||
prepopulate = get_prepopulate_packages(compose, arch, None)
|
prepopulate = get_prepopulate_packages(compose, arch, None)
|
||||||
pungi_wrapper.write_kickstart(ks_path=pungi_cfg, repos=repos, groups=grps, packages=packages, exclude_packages=[], comps_repo=None, prepopulate=prepopulate)
|
pungi_wrapper.write_kickstart(
|
||||||
|
ks_path=pungi_cfg,
|
||||||
|
repos=repos,
|
||||||
|
groups=grps,
|
||||||
|
packages=packages,
|
||||||
|
exclude_packages=[],
|
||||||
|
comps_repo=None,
|
||||||
|
prepopulate=prepopulate,
|
||||||
|
)
|
||||||
|
@ -47,15 +47,19 @@ def run_repoclosure(compose):
|
|||||||
if variant.is_empty:
|
if variant.is_empty:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
conf = get_arch_variant_data(compose.conf, 'repoclosure_strictness', arch, variant)
|
conf = get_arch_variant_data(
|
||||||
if conf and conf[-1] == 'off':
|
compose.conf, "repoclosure_strictness", arch, variant
|
||||||
|
)
|
||||||
|
if conf and conf[-1] == "off":
|
||||||
continue
|
continue
|
||||||
|
|
||||||
prefix = "%s-repoclosure" % compose.compose_id
|
prefix = "%s-repoclosure" % compose.compose_id
|
||||||
lookaside = {}
|
lookaside = {}
|
||||||
if variant.parent:
|
if variant.parent:
|
||||||
repo_id = "%s-%s.%s" % (prefix, variant.parent.uid, arch)
|
repo_id = "%s-%s.%s" % (prefix, variant.parent.uid, arch)
|
||||||
repo_dir = compose.paths.compose.repository(arch=arch, variant=variant.parent)
|
repo_dir = compose.paths.compose.repository(
|
||||||
|
arch=arch, variant=variant.parent
|
||||||
|
)
|
||||||
lookaside[repo_id] = repo_dir
|
lookaside[repo_id] = repo_dir
|
||||||
|
|
||||||
repos = {}
|
repos = {}
|
||||||
@ -63,8 +67,12 @@ def run_repoclosure(compose):
|
|||||||
repo_dir = compose.paths.compose.repository(arch=arch, variant=variant)
|
repo_dir = compose.paths.compose.repository(arch=arch, variant=variant)
|
||||||
repos[repo_id] = repo_dir
|
repos[repo_id] = repo_dir
|
||||||
|
|
||||||
for i, lookaside_url in enumerate(get_lookaside_repos(compose, arch, variant)):
|
for i, lookaside_url in enumerate(
|
||||||
lookaside["%s-lookaside-%s.%s-%s" % (compose.compose_id, variant.uid, arch, i)] = lookaside_url
|
get_lookaside_repos(compose, arch, variant)
|
||||||
|
):
|
||||||
|
lookaside[
|
||||||
|
"%s-lookaside-%s.%s-%s" % (compose.compose_id, variant.uid, arch, i)
|
||||||
|
] = lookaside_url
|
||||||
|
|
||||||
logfile = compose.paths.log.log_file(arch, "repoclosure-%s" % variant)
|
logfile = compose.paths.log.log_file(arch, "repoclosure-%s" % variant)
|
||||||
|
|
||||||
@ -80,11 +88,12 @@ def run_repoclosure(compose):
|
|||||||
else:
|
else:
|
||||||
_run_repoclosure_cmd(compose, repos, lookaside, arches, logfile)
|
_run_repoclosure_cmd(compose, repos, lookaside, arches, logfile)
|
||||||
except RuntimeError as exc:
|
except RuntimeError as exc:
|
||||||
if conf and conf[-1] == 'fatal':
|
if conf and conf[-1] == "fatal":
|
||||||
raise
|
raise
|
||||||
else:
|
else:
|
||||||
compose.log_warning('Repoclosure failed for %s.%s\n%s'
|
compose.log_warning(
|
||||||
% (variant.uid, arch, exc))
|
"Repoclosure failed for %s.%s\n%s" % (variant.uid, arch, exc)
|
||||||
|
)
|
||||||
finally:
|
finally:
|
||||||
if methods != "hybrid":
|
if methods != "hybrid":
|
||||||
_delete_repoclosure_cache_dirs(compose)
|
_delete_repoclosure_cache_dirs(compose)
|
||||||
@ -93,16 +102,18 @@ def run_repoclosure(compose):
|
|||||||
|
|
||||||
|
|
||||||
def _delete_repoclosure_cache_dirs(compose):
|
def _delete_repoclosure_cache_dirs(compose):
|
||||||
if 'dnf' == compose.conf["repoclosure_backend"]:
|
if "dnf" == compose.conf["repoclosure_backend"]:
|
||||||
from dnf.const import SYSTEM_CACHEDIR
|
from dnf.const import SYSTEM_CACHEDIR
|
||||||
from dnf.util import am_i_root
|
from dnf.util import am_i_root
|
||||||
from dnf.yum.misc import getCacheDir
|
from dnf.yum.misc import getCacheDir
|
||||||
|
|
||||||
if am_i_root():
|
if am_i_root():
|
||||||
top_cache_dir = SYSTEM_CACHEDIR
|
top_cache_dir = SYSTEM_CACHEDIR
|
||||||
else:
|
else:
|
||||||
top_cache_dir = getCacheDir()
|
top_cache_dir = getCacheDir()
|
||||||
else:
|
else:
|
||||||
from yum.misc import getCacheDir
|
from yum.misc import getCacheDir
|
||||||
|
|
||||||
top_cache_dir = getCacheDir()
|
top_cache_dir = getCacheDir()
|
||||||
|
|
||||||
for name in os.listdir(top_cache_dir):
|
for name in os.listdir(top_cache_dir):
|
||||||
@ -115,8 +126,12 @@ def _delete_repoclosure_cache_dirs(compose):
|
|||||||
|
|
||||||
|
|
||||||
def _run_repoclosure_cmd(compose, repos, lookaside, arches, logfile):
|
def _run_repoclosure_cmd(compose, repos, lookaside, arches, logfile):
|
||||||
cmd = repoclosure.get_repoclosure_cmd(backend=compose.conf["repoclosure_backend"],
|
cmd = repoclosure.get_repoclosure_cmd(
|
||||||
repos=repos, lookaside=lookaside, arch=arches)
|
backend=compose.conf["repoclosure_backend"],
|
||||||
|
repos=repos,
|
||||||
|
lookaside=lookaside,
|
||||||
|
arch=arches,
|
||||||
|
)
|
||||||
# Use temp working directory directory as workaround for
|
# Use temp working directory directory as workaround for
|
||||||
# https://bugzilla.redhat.com/show_bug.cgi?id=795137
|
# https://bugzilla.redhat.com/show_bug.cgi?id=795137
|
||||||
with temp_dir(prefix="repoclosure_") as tmp_dir:
|
with temp_dir(prefix="repoclosure_") as tmp_dir:
|
||||||
@ -147,22 +162,26 @@ def check_image_sanity(compose):
|
|||||||
|
|
||||||
def check_sanity(compose, variant, arch, image):
|
def check_sanity(compose, variant, arch, image):
|
||||||
path = os.path.join(compose.paths.compose.topdir(), image.path)
|
path = os.path.join(compose.paths.compose.topdir(), image.path)
|
||||||
deliverable = getattr(image, 'deliverable')
|
deliverable = getattr(image, "deliverable")
|
||||||
can_fail = getattr(image, 'can_fail', False)
|
can_fail = getattr(image, "can_fail", False)
|
||||||
with failable(compose, can_fail, variant, arch, deliverable,
|
with failable(
|
||||||
subvariant=image.subvariant):
|
compose, can_fail, variant, arch, deliverable, subvariant=image.subvariant
|
||||||
with open(path, 'rb') as f:
|
):
|
||||||
|
with open(path, "rb") as f:
|
||||||
iso = is_iso(f)
|
iso = is_iso(f)
|
||||||
if image.format == 'iso' and not iso:
|
if image.format == "iso" and not iso:
|
||||||
raise RuntimeError('%s does not look like an ISO file' % path)
|
raise RuntimeError("%s does not look like an ISO file" % path)
|
||||||
if (image.arch in ('x86_64', 'i386') and
|
if (
|
||||||
image.bootable and
|
image.arch in ("x86_64", "i386")
|
||||||
not has_mbr(f) and
|
and image.bootable
|
||||||
not has_gpt(f) and
|
and not has_mbr(f)
|
||||||
not (iso and has_eltorito(f))):
|
and not has_gpt(f)
|
||||||
|
and not (iso and has_eltorito(f))
|
||||||
|
):
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
'%s is supposed to be bootable, but does not have MBR nor '
|
"%s is supposed to be bootable, but does not have MBR nor "
|
||||||
'GPT nor is it a bootable ISO' % path)
|
"GPT nor is it a bootable ISO" % path
|
||||||
|
)
|
||||||
# If exception is raised above, failable may catch it, in which case
|
# If exception is raised above, failable may catch it, in which case
|
||||||
# nothing else will happen.
|
# nothing else will happen.
|
||||||
|
|
||||||
@ -174,19 +193,19 @@ def _check_magic(f, offset, bytes):


def is_iso(f):
    return _check_magic(f, 0x8001, b'CD001')
    return _check_magic(f, 0x8001, b"CD001")


def has_mbr(f):
    return _check_magic(f, 0x1fe, b'\x55\xAA')
    return _check_magic(f, 0x1FE, b"\x55\xAA")


def has_gpt(f):
    return _check_magic(f, 0x200, b'EFI PART')
    return _check_magic(f, 0x200, b"EFI PART")


def has_eltorito(f):
    return _check_magic(f, 0x8801, b'CD001\1EL TORITO SPECIFICATION')
    return _check_magic(f, 0x8801, b"CD001\1EL TORITO SPECIFICATION")


def check_size_limit(compose, variant, arch, img):
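These helpers only peek at fixed offsets for well-known magic values: the ISO9660 volume descriptor string at 0x8001, the 0x55AA MBR boot signature at 0x1FE, the GPT header at 0x200 and the El Torito boot record at 0x8801. A self-contained sketch of the same check run against an in-memory buffer instead of a real image:

import io

def check_magic(f, offset, expected):
    # Return True if the file-like object contains `expected` at `offset`.
    f.seek(offset)
    return f.read(len(expected)) == expected

# Fake "image": 0x200 bytes of padding followed by a GPT signature.
buf = io.BytesIO(b"\0" * 0x200 + b"EFI PART")
print(check_magic(buf, 0x200, b"EFI PART"))  # True
print(check_magic(buf, 0x1FE, b"\x55\xAA"))  # False - no MBR boot signature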
@ -207,7 +226,9 @@ def check_size_limit(compose, variant, arch, img):
|
|||||||
compose.conf, "createiso_max_size_is_strict", arch, variant
|
compose.conf, "createiso_max_size_is_strict", arch, variant
|
||||||
)
|
)
|
||||||
msg = "ISO %s is too big. Expected max %dB, got %dB" % (
|
msg = "ISO %s is too big. Expected max %dB, got %dB" % (
|
||||||
img.path, limit, img.size
|
img.path,
|
||||||
|
limit,
|
||||||
|
img.size,
|
||||||
)
|
)
|
||||||
if any(is_strict):
|
if any(is_strict):
|
||||||
raise RuntimeError(msg)
|
raise RuntimeError(msg)
|
||||||
|
@ -17,6 +17,7 @@ class WeaverPhase(object):
|
|||||||
:param phases_schema: two-dimensional array of phases. Top dimension
|
:param phases_schema: two-dimensional array of phases. Top dimension
|
||||||
denotes particular pipelines. Second dimension contains phases.
|
denotes particular pipelines. Second dimension contains phases.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
name = "weaver"
|
name = "weaver"
|
||||||
|
|
||||||
def __init__(self, compose, phases_schema):
|
def __init__(self, compose, phases_schema):
|
||||||
@ -32,7 +33,10 @@ class WeaverPhase(object):
|
|||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
if self.finished:
|
if self.finished:
|
||||||
msg = "Phase '%s' has already finished and can not be started twice" % self.name
|
msg = (
|
||||||
|
"Phase '%s' has already finished and can not be started twice"
|
||||||
|
% self.name
|
||||||
|
)
|
||||||
self.pool.log_error(msg)
|
self.pool.log_error(msg)
|
||||||
raise RuntimeError(msg)
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
@ -59,10 +63,15 @@ class PipelineThread(WorkerThread):
|
|||||||
"""
|
"""
|
||||||
Launches phases in pipeline sequentially
|
Launches phases in pipeline sequentially
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def process(self, item, num):
|
def process(self, item, num):
|
||||||
pipeline = shortcuts.force_list(item)
|
pipeline = shortcuts.force_list(item)
|
||||||
phases_names = ", ".join(phase.name for phase in pipeline)
|
phases_names = ", ".join(phase.name for phase in pipeline)
|
||||||
msg = "Running pipeline (%d/%d). Phases: %s" % (num, self.pool.queue_total, phases_names)
|
msg = "Running pipeline (%d/%d). Phases: %s" % (
|
||||||
|
num,
|
||||||
|
self.pool.queue_total,
|
||||||
|
phases_names,
|
||||||
|
)
|
||||||
self.pool.log_info("[BEGIN] %s" % (msg,))
|
self.pool.log_info("[BEGIN] %s" % (msg,))
|
||||||
|
|
||||||
for phase in pipeline:
|
for phase in pipeline:
|
||||||
|
@ -64,6 +64,7 @@ class Profiler(object):
|
|||||||
def decorated(*args, **kwargs):
|
def decorated(*args, **kwargs):
|
||||||
with self:
|
with self:
|
||||||
return func(*args, **kwargs)
|
return func(*args, **kwargs)
|
||||||
|
|
||||||
return decorated
|
return decorated
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@ -72,5 +73,6 @@ class Profiler(object):
|
|||||||
results = cls._data.items()
|
results = cls._data.items()
|
||||||
results = sorted(results, key=lambda x: x[1]["time"], reverse=True)
|
results = sorted(results, key=lambda x: x[1]["time"], reverse=True)
|
||||||
for name, data in results:
|
for name, data in results:
|
||||||
print(" %6.2f %5d %s" % (data["time"], data["calls"], name),
|
print(
|
||||||
file=sys.stdout)
|
" %6.2f %5d %s" % (data["time"], data["calls"], name), file=sys.stdout
|
||||||
|
)
|
||||||
|
@ -83,9 +83,13 @@ class Runroot(kobo.log.LoggingBase):
|
|||||||
|
|
||||||
koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
|
koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
|
||||||
koji_cmd = koji_wrapper.get_runroot_cmd(
|
koji_cmd = koji_wrapper.get_runroot_cmd(
|
||||||
runroot_tag, arch, command,
|
runroot_tag,
|
||||||
channel=runroot_channel, use_shell=True,
|
arch,
|
||||||
packages=packages, **kwargs
|
command,
|
||||||
|
channel=runroot_channel,
|
||||||
|
use_shell=True,
|
||||||
|
packages=packages,
|
||||||
|
**kwargs
|
||||||
)
|
)
|
||||||
|
|
||||||
output = koji_wrapper.run_runroot_cmd(koji_cmd, log_file=log_file)
|
output = koji_wrapper.run_runroot_cmd(koji_cmd, log_file=log_file)
|
||||||
@ -115,8 +119,15 @@ class Runroot(kobo.log.LoggingBase):
|
|||||||
def _log_file(self, base, suffix):
|
def _log_file(self, base, suffix):
|
||||||
return base.replace(".log", "." + suffix + ".log")
|
return base.replace(".log", "." + suffix + ".log")
|
||||||
|
|
||||||
def _run_openssh(self, command, log_file=None, arch=None, packages=None,
|
def _run_openssh(
|
||||||
chown_paths=None, **kwargs):
|
self,
|
||||||
|
command,
|
||||||
|
log_file=None,
|
||||||
|
arch=None,
|
||||||
|
packages=None,
|
||||||
|
chown_paths=None,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
Runs the runroot command on remote machine using ssh.
|
Runs the runroot command on remote machine using ssh.
|
||||||
"""
|
"""
|
||||||
@ -176,7 +187,9 @@ class Runroot(kobo.log.LoggingBase):
|
|||||||
fmt_dict["runroot_key"] = runroot_key
|
fmt_dict["runroot_key"] = runroot_key
|
||||||
self._ssh_run(hostname, user, run_template, fmt_dict, log_file=log_file)
|
self._ssh_run(hostname, user, run_template, fmt_dict, log_file=log_file)
|
||||||
|
|
||||||
fmt_dict["command"] = "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'"
|
fmt_dict[
|
||||||
|
"command"
|
||||||
|
] = "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'"
|
||||||
buildroot_rpms = self._ssh_run(
|
buildroot_rpms = self._ssh_run(
|
||||||
hostname,
|
hostname,
|
||||||
user,
|
user,
|
||||||
@ -254,8 +267,13 @@ class Runroot(kobo.log.LoggingBase):
|
|||||||
|
|
||||||
koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
|
koji_wrapper = kojiwrapper.KojiWrapper(self.compose.conf["koji_profile"])
|
||||||
koji_cmd = koji_wrapper.get_pungi_buildinstall_cmd(
|
koji_cmd = koji_wrapper.get_pungi_buildinstall_cmd(
|
||||||
runroot_tag, arch, args, channel=runroot_channel,
|
runroot_tag,
|
||||||
chown_uid=os.getuid(), **kwargs)
|
arch,
|
||||||
|
args,
|
||||||
|
channel=runroot_channel,
|
||||||
|
chown_uid=os.getuid(),
|
||||||
|
**kwargs
|
||||||
|
)
|
||||||
|
|
||||||
output = koji_wrapper.run_runroot_cmd(koji_cmd, log_file=log_file)
|
output = koji_wrapper.run_runroot_cmd(koji_cmd, log_file=log_file)
|
||||||
if output["retcode"] != 0:
|
if output["retcode"] != 0:
|
||||||
|
@@ -11,24 +11,58 @@ from pungi.wrappers.comps import CompsFilter
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("--output", help="redirect output to a file")
-    parser.add_argument("--arch", required=True,
-                        help="filter groups and packages according to an arch")
-    parser.add_argument("--arch-only-groups", default=False, action="store_true",
-                        help="keep only arch groups, remove the rest")
+    parser.add_argument(
+        "--arch", required=True, help="filter groups and packages according to an arch"
+    )
+    parser.add_argument(
+        "--arch-only-groups",
+        default=False,
+        action="store_true",
+        help="keep only arch groups, remove the rest",
+    )

The remaining options in this hunk (--arch-only-packages, --arch-only-environments, --remove-categories, --remove-langpacks, --remove-translations, --remove-environments and --keep-empty-group) are rewrapped in the same way, one keyword argument per line with a trailing comma.

@@ -36,13 +70,22 @@ def main():
         metavar="GROUPID",
         help="keep this group in environments even if they are not defined in the comps",
     )
-    parser.add_argument("--no-cleanup", default=False, action="store_true",
-                        help="don't remove empty groups and categories")
-    parser.add_argument("--no-reindent", default=False, action="store_true",
-                        help="don't re-indent the output")
-    parser.add_argument("comps_file", metavar='COMPS_FILE')
-    parser.add_argument('--variant',
-                        help='filter groups and packages according to variant name')
+    parser.add_argument(
+        "--no-cleanup",
+        default=False,
+        action="store_true",
+        help="don't remove empty groups and categories",
+    )
+    parser.add_argument(
+        "--no-reindent",
+        default=False,
+        action="store_true",
+        help="don't re-indent the output",
+    )
+    parser.add_argument("comps_file", metavar="COMPS_FILE")
+    parser.add_argument(
+        "--variant", help="filter groups and packages according to variant name"
+    )

     opts = parser.parse_args()

@@ -67,4 +110,4 @@ def main():
     if opts.remove_environments:
         f.remove_environments()

-    f.write(open(opts.output, 'wb') if opts.output else sys.stdout)
+    f.write(open(opts.output, "wb") if opts.output else sys.stdout)
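The pattern in the hunk above repeats throughout the commit: a call that no longer fits within black's default 88-character line is rewrapped with one argument per line and a trailing comma. A minimal sketch of the same transformation, assuming the black package and its Python API (black.format_str, black.FileMode) are available:

# Illustrative sketch: reproduce the rewrapping of one add_argument() call.
import black

src = (
    'parser.add_argument("--arch-only-groups", default=False, action="store_true",\n'
    '                    help="keep only arch groups, remove the rest")\n'
)

# The call exceeds the default 88-column limit, so black splits it into
# one argument per line and appends a trailing comma.
print(black.format_str(src, mode=black.FileMode()))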
@@ -34,29 +34,29 @@ class ValidationCompose(pungi.compose.Compose):

     @property
     def old_composes(self):
-        return '/dummy' if self.has_old_composes else None
+        return "/dummy" if self.has_old_composes else None

     @property
     def compose_id(self):
-        return 'Dummy-1.0-20160811.t.0'
+        return "Dummy-1.0-20160811.t.0"

     @property
     def compose_type(self):
-        return 'test'
+        return "test"

     @property
     def compose_date(self):
-        return '20160811'
+        return "20160811"

     @property
     def compose_respin(self):
-        return '0'
+        return "0"


 def read_variants(compose, config):
     with pungi.util.temp_dir() as tmp_dir:
         scm_dict = compose.conf["variants_file"]
-        if isinstance(scm_dict, six.string_types) and scm_dict[0] != '/':
+        if isinstance(scm_dict, six.string_types) and scm_dict[0] != "/":
             config_dir = os.path.dirname(config)
             scm_dict = os.path.join(config_dir, scm_dict)
         files = pungi.wrappers.scm.get_file_from_scm(scm_dict, tmp_dir)
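The hunk above is pure string-quote normalization: black prefers double quotes and only keeps single quotes when switching would force extra backslash escapes. A small illustration, assuming the same black API as above:

# Illustrative sketch of black's quote normalization.
import black

src = "a = 'test'\nb = 'it\\'s'\nc = 'say \"hi\"'\n"
print(black.format_str(src, mode=black.FileMode()))
# a = "test"       -> plain single quotes become double quotes
# b = "it's"       -> the escape is no longer needed and is dropped
# c = 'say "hi"'   -> left alone, double quotes would require escaping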
@@ -144,24 +144,29 @@ def run(config, topdir, has_old, offline, defined_variables, schema_overrides):

 class DumpSchemaAction(argparse.Action):
     def __call__(self, parser, ns, values, option_string=None):
-        json.dump(pungi.checks.make_schema(), sys.stdout,
-                  sort_keys=True, indent=4)
-        print('')
+        json.dump(pungi.checks.make_schema(), sys.stdout, sort_keys=True, indent=4)
+        print("")
         sys.exit(0)


 def main(args=None):
     parser = argparse.ArgumentParser()
-    parser.add_argument('--dump-schema', nargs=0, action=DumpSchemaAction,
-                        help='print JSON Schema of configuration and exit')
-    parser.add_argument('config', metavar='CONFIG',
-                        help='configuration file to validate')
-    parser.add_argument('--old-composes', action='store_true',
-                        help='indicate if pungi-koji will be run with --old-composes option')
-    parser.add_argument(
-        "--offline",
-        action="store_true",
-        help="Do not validate git references in URLs",
-    )
+    parser.add_argument(
+        "--dump-schema",
+        nargs=0,
+        action=DumpSchemaAction,
+        help="print JSON Schema of configuration and exit",
+    )
+    parser.add_argument(
+        "config", metavar="CONFIG", help="configuration file to validate"
+    )
+    parser.add_argument(
+        "--old-composes",
+        action="store_true",
+        help="indicate if pungi-koji will be run with --old-composes option",
+    )
+    parser.add_argument(
+        "--offline", action="store_true", help="Do not validate git references in URLs",
+    )
     parser.add_argument(
         "-e",
@@ -18,10 +18,7 @@ def parse_args():
     parser = argparse.ArgumentParser(add_help=True)

     parser.add_argument(
-        'compose',
-        metavar='<compose-path>',
-        nargs=1,
-        help='path to compose',
+        "compose", metavar="<compose-path>", nargs=1, help="path to compose",
     )

     return parser.parse_args()
@@ -8,18 +8,18 @@ import sys


 def send(cmd, data):
-    topic = 'compose.%s' % cmd.replace('-', '.').lower()
-    fedmsg.publish(topic=topic, modname='pungi', msg=data)
+    topic = "compose.%s" % cmd.replace("-", ".").lower()
+    fedmsg.publish(topic=topic, modname="pungi", msg=data)


 def main():
     parser = argparse.ArgumentParser()
-    parser.add_argument('cmd')
+    parser.add_argument("cmd")
     opts = parser.parse_args()

     config = fedmsg.config.load_config()
-    config['active'] = True  # Connect out to a fedmsg-relay instance
-    config['cert_prefix'] = 'releng'  # Use this cert.
+    config["active"] = True  # Connect out to a fedmsg-relay instance
+    config["cert_prefix"] = "releng"  # Use this cert.
     fedmsg.init(**config)

     data = json.load(sys.stdin)
@@ -22,24 +22,32 @@ from pungi_utils import patch_iso

 def main(args=None):
     parser = argparse.ArgumentParser()
-    parser.add_argument('-v', '--verbose', action='store_true',
-                        help='Print debugging information')
-    parser.add_argument('--supported', choices=('true', 'false'),
-                        help='Override supported bit on the ISO')
+    parser.add_argument(
+        "-v", "--verbose", action="store_true", help="Print debugging information"
+    )
+    parser.add_argument(
+        "--supported",
+        choices=("true", "false"),
+        help="Override supported bit on the ISO",
+    )

The --volume-id, --force-arch, target, source and dirs arguments in the same hunk get the same treatment: short calls land on a single line, for example parser.add_argument("--volume-id", help="Override volume ID on the ISO") and parser.add_argument("source", metavar="SOURCE_ISO", help="source ISO to work with"), while the longer dirs definition is split one keyword per line.

     opts = parser.parse_args(args)

     level = logging.DEBUG if opts.verbose else logging.INFO
-    format = '%(levelname)s: %(message)s'
+    format = "%(levelname)s: %(message)s"
     logging.basicConfig(level=level, format=format)
     log = logging.getLogger()
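All of these rewrites come from running the tool over the tree rather than from hand edits. A sketch of how the result can be previewed or reapplied from a script follows; the target path is an assumption, and the same thing can of course be done directly from the shell.

# Illustrative sketch: preview (and optionally apply) black formatting.
import subprocess

# --check --diff only reports what would change; the exit code is non-zero
# when at least one file would be reformatted.
subprocess.run(["black", "--check", "--diff", "."])

# Dropping the flags rewrites the files in place.
# subprocess.run(["black", "."])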
@@ -30,142 +30,290 @@ def get_arguments(config):

     class SetConfig(Action):
         def __call__(self, parser, namespace, value, option_string=None):
-            config.set('pungi', self.dest, value)
+            config.set("pungi", self.dest, value)

-    parser.add_argument('--version', action='version', version=get_full_version())
+    parser.add_argument("--version", action="version", version=get_full_version())

     # Pulled in from config file to be cli options as part of pykickstart conversion
-    parser.add_argument(
-        "--name", dest="family", type=str, action=SetConfig,
-        help='the name for your distribution (defaults to "Fedora"), DEPRECATED')
+    parser.add_argument(
+        "--name",
+        dest="family",
+        type=str,
+        action=SetConfig,
+        help='the name for your distribution (defaults to "Fedora"), DEPRECATED',
+    )

The rest of the hunk applies the same treatment to the remaining option definitions (--family, --ver, --flavor, --variant, --destdir, --cachedir, --bugurl, --selfhosting, --fulltree, --nosource, --nodebuginfo, --nodownload, --norelnotes, --nogreedy, --nodeps, --sourceisos, --force, --isfinal, --nohash, --full-archlist, --multilib, --lookaside-repo, --workdirbase, --no-dvd, -i/--installpkgs, --multilibconf, -c/--config, --all-stages, -G, -C, -B, -I, --relnotepkgs, --relnotefilere, --nomacboot, --rootfs-size and --pungirc): long parser.add_argument() calls are split one keyword argument per line with a trailing comma, while calls that fit under the line limit are kept or merged onto a single line, for example:

-    parser.add_argument("--arch", help='Override default (uname based) arch')
+    parser.add_argument("--arch", help="Override default (uname based) arch")
-    parser.add_argument("--lorax-conf",
-                        help='Path to lorax.conf file (optional)')
+    parser.add_argument("--lorax-conf", help="Path to lorax.conf file (optional)")
-    parser.add_argument(
-        "--greedy", metavar="METHOD",
-        help='Greedy method; none, all, build')
+    parser.add_argument(
+        "--greedy", metavar="METHOD", help="Greedy method; none, all, build"
+    )
     opts = parser.parse_args()

-    if not config.get('pungi', 'variant').isalnum() and not config.get('pungi', 'variant') == '':
+    if (
+        not config.get("pungi", "variant").isalnum()
+        and not config.get("pungi", "variant") == ""
+    ):
         parser.error("Variant must be alphanumeric")

-    if opts.do_gather or opts.do_createrepo or opts.do_buildinstall or opts.do_createiso:
+    if (
+        opts.do_gather
+        or opts.do_createrepo
+        or opts.do_buildinstall
+        or opts.do_createiso
+    ):
         opts.do_all = False

     if opts.arch and (opts.do_all or opts.do_buildinstall):
         parser.error("Cannot override arch while the BuildInstall stage is enabled")

     # set the iso_basename.
-    if not config.get('pungi', 'variant') == '':
-        config.set('pungi', 'iso_basename', '%s-%s' % (config.get('pungi', 'family'), config.get('pungi', 'variant')))
+    if not config.get("pungi", "variant") == "":
+        config.set(
+            "pungi",
+            "iso_basename",
+            "%s-%s" % (config.get("pungi", "family"), config.get("pungi", "variant")),
+        )
     else:
-        config.set('pungi', 'iso_basename', config.get('pungi', 'family'))
+        config.set("pungi", "iso_basename", config.get("pungi", "family"))

     return opts
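The same line-length rule drives the condition rewrites above: a boolean test that no longer fits is wrapped in parentheses and split at the and/or operators, a pure layout change. A small sketch using the first condition from this hunk, illustrative only and assuming black's Python API:

# Illustrative sketch: black wrapping an over-long "if" condition.
import black

src = (
    "if not config.get('pungi', 'variant').isalnum()"
    " and not config.get('pungi', 'variant') == '':\n"
    "    parser.error('Variant must be alphanumeric')\n"
)

# The condition is parenthesized and split at the boolean operator,
# and the string quotes are normalized at the same time.
print(black.format_str(src, mode=black.FileMode()))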
@@ -192,45 +340,53 @@ def main():
         print("INFO: selinux disabled")
         enforcing = False
     if enforcing:
-        print("WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled.")
+        print(
+            "WARNING: SELinux is enforcing. This may lead to a compose with selinux disabled."
+        )
         print("Consider running with setenforce 0.")

     # Set up the kickstart parser and pass in the kickstart file we were handed
     ksparser = pungi.ks.get_ksparser(ks_path=opts.config)

     if opts.sourceisos:
-        config.set('pungi', 'arch', 'source')
+        config.set("pungi", "arch", "source")

     for part in ksparser.handler.partition.partitions:
-        if part.mountpoint == 'iso':
-            config.set('pungi', 'cdsize', str(part.size))
+        if part.mountpoint == "iso":
+            config.set("pungi", "cdsize", str(part.size))

-    config.set('pungi', 'force', str(opts.force))
+    config.set("pungi", "force", str(opts.force))

-    if config.get('pungi', 'workdirbase') == '/work':
-        config.set('pungi', 'workdirbase', "%s/work" % config.get('pungi', 'destdir'))
+    if config.get("pungi", "workdirbase") == "/work":
+        config.set("pungi", "workdirbase", "%s/work" % config.get("pungi", "destdir"))
     # Set up our directories
-    if not os.path.exists(config.get('pungi', 'destdir')):
+    if not os.path.exists(config.get("pungi", "destdir")):
         try:
-            os.makedirs(config.get('pungi', 'destdir'))
+            os.makedirs(config.get("pungi", "destdir"))
         except OSError:
-            print("Error: Cannot create destination dir %s" % config.get('pungi', 'destdir'),
-                  file=sys.stderr)
+            print(
+                "Error: Cannot create destination dir %s"
+                % config.get("pungi", "destdir"),
+                file=sys.stderr,
+            )
             sys.exit(1)
     else:
         print("Warning: Reusing existing destination directory.")

The working base directory block is rewritten identically ("Error: Cannot create working base dir %s" is wrapped the same way), and cachedir = config.get("pungi", "cachedir") switches to double quotes.

@@ -241,32 +397,32 @@ def main():
     # Set debuginfo flag
     if opts.nodebuginfo:
-        config.set('pungi', 'debuginfo', "False")
+        config.set("pungi", "debuginfo", "False")
     if opts.greedy:
-        config.set('pungi', 'greedy', opts.greedy)
+        config.set("pungi", "greedy", opts.greedy)

The same single-to-double quote conversion is applied to the rest of this hunk (the nogreedy, resolve_deps, isfinal, nohash, full_archlist, arch, multilib, lookaside_repos, no_dvd and nomacboot settings); the already double-quoted fulltree, selfhosting and nosource lines are unchanged.
@@ -303,7 +459,9 @@ def main():
                 flags_str = ",".join(line["flags"])
                 if flags_str:
                     flags_str = "(%s)" % flags_str
-                sys.stdout.write("DEBUGINFO%s: %s\n" % (flags_str, line["path"]))
+                sys.stdout.write(
+                    "DEBUGINFO%s: %s\n" % (flags_str, line["path"])
+                )
             sys.stdout.flush()
         else:
             mypungi.downloadDebuginfo()
@@ -320,7 +478,10 @@ def main():
         print("RPM size: %s MiB" % (mypungi.size_packages() / 1024 ** 2))
         if not opts.nodebuginfo:
-            print("DEBUGINFO size: %s MiB" % (mypungi.size_debuginfo() / 1024 ** 2))
+            print(
+                "DEBUGINFO size: %s MiB"
+                % (mypungi.size_debuginfo() / 1024 ** 2)
+            )
         if not opts.nosource:
             print("SRPM size: %s MiB" % (mypungi.size_srpms() / 1024 ** 2))
@@ -340,10 +501,13 @@ def main():
     # Do things slightly different for src.
     if opts.sourceisos:
         # we already have all the content gathered
-        mypungi.topdir = os.path.join(config.get('pungi', 'destdir'),
-                                      config.get('pungi', 'version'),
-                                      config.get('pungi', 'variant'),
-                                      'source', 'SRPMS')
+        mypungi.topdir = os.path.join(
+            config.get("pungi", "destdir"),
+            config.get("pungi", "version"),
+            config.get("pungi", "variant"),
+            "source",
+            "SRPMS",
+        )
         mypungi.doCreaterepo(comps=False)
     if opts.do_all or opts.do_createiso:
         mypungi.doCreateIsos()
@@ -18,22 +18,17 @@ from pungi.util import temp_dir
 def get_parser():
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        "--profiler",
-        action="store_true",
+        "--profiler", action="store_true",
     )
     parser.add_argument(
-        "--arch",
-        required=True,
+        "--arch", required=True,
    )
     parser.add_argument(
-        "--config",
-        metavar="PATH",
-        required=True,
-        help="path to kickstart config file",
+        "--config", metavar="PATH", required=True, help="path to kickstart config file",
     )
     parser.add_argument(
         "--download-to",
-        metavar='PATH',
+        metavar="PATH",
         help="download packages to given directory instead of just printing paths",
     )
@@ -47,9 +42,7 @@ def get_parser():
     group = parser.add_argument_group("Gather options")
     group.add_argument(
-        "--nodeps",
-        action="store_true",
-        help="disable resolving dependencies",
+        "--nodeps", action="store_true", help="disable resolving dependencies",
     )
     group.add_argument(
         "--selfhosting",
@@ -68,9 +61,7 @@ def get_parser():
         choices=["none", "all", "build"],
     )
     group.add_argument(
-        "--multilib",
-        metavar="[METHOD]",
-        action="append",
+        "--multilib", metavar="[METHOD]", action="append",
     )
     group.add_argument(
         "--tempdir",
@@ -135,13 +126,13 @@ def main(ns, persistdir, cachedir):
             continue

         if not getattr(ks_repo, "metalink", False):
-            dnf_obj.add_repo(
-                ks_repo.name, ks_repo.baseurl, enablegroups=False
-            )
+            dnf_obj.add_repo(ks_repo.name, ks_repo.baseurl, enablegroups=False)
         else:
             dnf_obj.add_repo(
-                ks_repo.name, ks_repo.baseurl, enablegroups=False,
-                metalink=ks_repo.metalink
+                ks_repo.name,
+                ks_repo.baseurl,
+                enablegroups=False,
+                metalink=ks_repo.metalink,
             )

     for ks_repo in ksparser.handler.repo.repoList:
@@ -150,8 +141,7 @@ def main(ns, persistdir, cachedir):
         if not getattr(ks_repo, "metalink", False):
             dnf_obj.add_repo(ks_repo.name, ks_repo.baseurl)
         else:
-            dnf_obj.add_repo(ks_repo.name, ks_repo.baseurl,
-                             metalink=ks_repo.metalink)
+            dnf_obj.add_repo(ks_repo.name, ks_repo.baseurl, metalink=ks_repo.metalink)

     with Profiler("DnfWrapper.fill_sack()"):
         dnf_obj.fill_sack(load_system_repo=False, load_available_repos=True)
@@ -190,7 +180,7 @@ def _get_url(pkg):


 def _fmt_flags(flags):
-    return "(%s)" % ",".join(sorted(f.name.replace('_', '-') for f in flags))
+    return "(%s)" % ",".join(sorted(f.name.replace("_", "-") for f in flags))


 def deduplicate(gather_obj, items):
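Note that in this file the change also goes in the other direction: argument lists that fit under the limit are joined back onto a single line inside the call. How aggressively that happens depends on the black release; the version apparently used for this commit predates the "magic trailing comma" behaviour introduced in black 20.8b0, which would instead keep an already-exploded call ending in a trailing comma as it is. A small, purely illustrative sketch:

# Illustrative sketch: a short call is collapsed back onto one line.
import black

src = (
    "parser.add_argument(\n"
    '    "--arch",\n'
    '    required=True\n'
    ")\n"
)
# Fits comfortably under 88 columns, so black emits:
#     parser.add_argument("--arch", required=True)
print(black.format_str(src, mode=black.FileMode()))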
@@ -35,7 +35,7 @@ COMPOSE = None
 def main():
     global COMPOSE

-    PHASES_NAMES_MODIFIED = PHASES_NAMES + ['productimg']
+    PHASES_NAMES_MODIFIED = PHASES_NAMES + ["productimg"]

     parser = argparse.ArgumentParser()
     group = parser.add_mutually_exclusive_group(required=True)
@@ -51,19 +51,19 @@ def main():
     )
     parser.add_argument(
         "--label",
-        help="specify compose label (example: Snapshot-1.0); required for production composes"
+        help="specify compose label (example: Snapshot-1.0); required for production composes",
     )
     parser.add_argument(
         "--no-label",
         action="store_true",
         default=False,
-        help="make a production compose without label"
+        help="make a production compose without label",
     )
     parser.add_argument(
         "--supported",
         action="store_true",
         default=False,
-        help="set supported flag on media (automatically on for 'RC-x.y' labels)"
+        help="set supported flag on media (automatically on for 'RC-x.y' labels)",
     )
     parser.add_argument(
         "--old-composes",
@@ -73,11 +73,7 @@ def main():
         action="append",
         help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
     )
-    parser.add_argument(
-        "--config",
-        help="Config file",
-        required=True
-    )
+    parser.add_argument("--config", help="Config file", required=True)
     parser.add_argument(
         "--skip-phase",
         metavar="PHASE",
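Whether a call above is collapsed onto one line or kept split is again decided purely by the configured line length (88 columns unless overridden). A sketch, assuming black's Python API; the 150-column value is only there to show the contrast:

# Illustrative sketch: the same call formatted under two line-length limits.
import black

src = (
    'parser.add_argument("--ver", dest="version", action=SetConfig,'
    ' help="the version of your distribution (defaults to datestamp)")\n'
)

# Default 88 columns: the call is split one keyword argument per line.
print(black.format_str(src, mode=black.FileMode()))

# A much wider limit: the call stays on a single line.
print(black.format_str(src, mode=black.FileMode(line_length=150)))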
@@ -139,14 +135,14 @@ def main():
         "--notification-script",
         action="append",
         default=[],
-        help="script for sending progress notification messages"
+        help="script for sending progress notification messages",
     )
     parser.add_argument(
         "--no-latest-link",
         action="store_true",
         default=False,
         dest="no_latest_link",
-        help="don't create latest symbol link to this compose"
+        help="don't create latest symbol link to this compose",
     )
     parser.add_argument(
         "--latest-link-status",
@@ -159,23 +155,30 @@ def main():
         "--print-output-dir",
         action="store_true",
         default=False,
-        help="print the compose directory"
+        help="print the compose directory",
     )
     parser.add_argument(
         "--quiet",
         action="store_true",
         default=False,
-        help="quiet mode, don't print log on screen"
+        help="quiet mode, don't print log on screen",
     )

     opts = parser.parse_args()
     import pungi.notifier

     notifier = pungi.notifier.PungiNotifier(opts.notification_script)

     def fail_to_start(msg, **kwargs):
-        notifier.send('fail-to-start', workdir=opts.target_dir,
-                      command=sys.argv, target_dir=opts.target_dir,
-                      config=opts.config, detail=msg, **kwargs)
+        notifier.send(
+            "fail-to-start",
+            workdir=opts.target_dir,
+            command=sys.argv,
+            target_dir=opts.target_dir,
+            config=opts.config,
+            detail=msg,
+            **kwargs
+        )

     def abort(msg):
         fail_to_start(msg)
@@ -184,11 +187,17 @@ def main():
     if opts.target_dir and not opts.compose_dir:
         opts.target_dir = os.path.abspath(opts.target_dir)
         if not os.path.isdir(opts.target_dir):
-            abort("The target directory does not exist or is not a directory: %s" % opts.target_dir)
+            abort(
+                "The target directory does not exist or is not a directory: %s"
+                % opts.target_dir
+            )
     else:
         opts.compose_dir = os.path.abspath(opts.compose_dir)
         if not os.path.isdir(opts.compose_dir):
-            abort("The compose directory does not exist or is not a directory: %s" % opts.compose_dir)
+            abort(
+                "The compose directory does not exist or is not a directory: %s"
+                % opts.compose_dir
+            )

     opts.config = os.path.abspath(opts.config)

@@ -214,12 +223,13 @@ def main():
     conf = util.load_config(opts.config)

-    compose_type = opts.compose_type or conf.get('compose_type', 'production')
+    compose_type = opts.compose_type or conf.get("compose_type", "production")
     if compose_type == "production" and not opts.label and not opts.no_label:
         abort("must specify label for a production compose")

     # check if all requirements are met
     import pungi.checks

     if not pungi.checks.check(conf):
         sys.exit(1)
     pungi.checks.check_umask(logger)
@@ -229,8 +239,11 @@ def main():
     # TODO: workaround for config files containing skip_phase = productimg
     # Remove when all config files are up to date
-    if 'productimg' in opts.skip_phase or 'productimg' in opts.just_phase:
-        print('WARNING: productimg phase has been removed, please remove it from --skip-phase or --just-phase option', file=sys.stderr)
+    if "productimg" in opts.skip_phase or "productimg" in opts.just_phase:
+        print(
+            "WARNING: productimg phase has been removed, please remove it from --skip-phase or --just-phase option",
+            file=sys.stderr,
+        )
     for err in errors[:]:
         if "'productimg' is not one of" in err:
             errors.remove(err)
@@ -242,18 +255,21 @@ def main():
     if errors:
         for error in errors:
             print(error, file=sys.stderr)
-        fail_to_start('Config validation failed', errors=errors)
+        fail_to_start("Config validation failed", errors=errors)
         sys.exit(1)

     if opts.target_dir:
-        compose_dir = Compose.get_compose_dir(opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label)
+        compose_dir = Compose.get_compose_dir(
+            opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label
+        )
     else:
         compose_dir = opts.compose_dir

     if opts.print_output_dir:
-        print('Compose dir: %s' % compose_dir)
+        print("Compose dir: %s" % compose_dir)

-    compose = Compose(conf,
-                      topdir=compose_dir,
-                      skip_phases=opts.skip_phase,
-                      just_phases=opts.just_phase,
+    compose = Compose(
+        conf,
+        topdir=compose_dir,
+        skip_phases=opts.skip_phase,
+        just_phases=opts.just_phase,
@@ -261,10 +277,15 @@ def main():
-                      koji_event=opts.koji_event,
-                      supported=opts.supported,
-                      logger=logger,
-                      notifier=notifier)
+        koji_event=opts.koji_event,
+        supported=opts.supported,
+        logger=logger,
+        notifier=notifier,
+    )
     notifier.compose = compose
     COMPOSE = compose
-    run_compose(compose, create_latest_link=create_latest_link, latest_link_status=latest_link_status)
+    run_compose(
+        compose,
+        create_latest_link=create_latest_link,
+        latest_link_status=latest_link_status,
+    )


 def run_compose(compose, create_latest_link=True, latest_link_status=None):
@@ -279,7 +300,9 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
     compose.log_info("Pungi version: %s" % get_full_version())
     compose.log_info("User name: %s" % getpass.getuser())
     compose.log_info("Working directory: %s" % os.getcwd())
-    compose.log_info("Command line: %s" % " ".join([shlex_quote(arg) for arg in sys.argv]))
+    compose.log_info(
+        "Command line: %s" % " ".join([shlex_quote(arg) for arg in sys.argv])
+    )
     compose.log_info("Compose top directory: %s" % compose.topdir)
     compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
     compose.read_variants()
@ -301,7 +324,9 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
gather_phase = pungi.phases.GatherPhase(compose, pkgset_phase)
|
gather_phase = pungi.phases.GatherPhase(compose, pkgset_phase)
|
||||||
extrafiles_phase = pungi.phases.ExtraFilesPhase(compose, pkgset_phase)
|
extrafiles_phase = pungi.phases.ExtraFilesPhase(compose, pkgset_phase)
|
||||||
createrepo_phase = pungi.phases.CreaterepoPhase(compose, pkgset_phase)
|
createrepo_phase = pungi.phases.CreaterepoPhase(compose, pkgset_phase)
|
||||||
ostree_installer_phase = pungi.phases.OstreeInstallerPhase(compose, buildinstall_phase, pkgset_phase)
|
ostree_installer_phase = pungi.phases.OstreeInstallerPhase(
|
||||||
|
compose, buildinstall_phase, pkgset_phase
|
||||||
|
)
|
||||||
ostree_phase = pungi.phases.OSTreePhase(compose, pkgset_phase)
|
ostree_phase = pungi.phases.OSTreePhase(compose, pkgset_phase)
|
||||||
createiso_phase = pungi.phases.CreateisoPhase(compose, buildinstall_phase)
|
createiso_phase = pungi.phases.CreateisoPhase(compose, buildinstall_phase)
|
||||||
extra_isos_phase = pungi.phases.ExtraIsosPhase(compose)
|
extra_isos_phase = pungi.phases.ExtraIsosPhase(compose)
|
||||||
@ -313,12 +338,24 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
test_phase = pungi.phases.TestPhase(compose)
|
test_phase = pungi.phases.TestPhase(compose)
|
||||||
|
|
||||||
# check if all config options are set
|
# check if all config options are set
|
||||||
for phase in (init_phase, pkgset_phase, createrepo_phase,
|
for phase in (
|
||||||
buildinstall_phase, gather_phase,
|
init_phase,
|
||||||
extrafiles_phase, createiso_phase, liveimages_phase,
|
pkgset_phase,
|
||||||
livemedia_phase, image_build_phase, image_checksum_phase,
|
createrepo_phase,
|
||||||
test_phase, ostree_phase, ostree_installer_phase,
|
buildinstall_phase,
|
||||||
extra_isos_phase, osbs_phase):
|
gather_phase,
|
||||||
|
extrafiles_phase,
|
||||||
|
createiso_phase,
|
||||||
|
liveimages_phase,
|
||||||
|
livemedia_phase,
|
||||||
|
image_build_phase,
|
||||||
|
image_checksum_phase,
|
||||||
|
test_phase,
|
||||||
|
ostree_phase,
|
||||||
|
ostree_installer_phase,
|
||||||
|
extra_isos_phase,
|
||||||
|
osbs_phase,
|
||||||
|
):
|
||||||
if phase.skip():
|
if phase.skip():
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
@ -330,7 +367,7 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
for i in errors:
|
for i in errors:
|
||||||
compose.log_error(i)
|
compose.log_error(i)
|
||||||
print(i)
|
print(i)
|
||||||
raise RuntimeError('Configuration is not valid')
|
raise RuntimeError("Configuration is not valid")
|
||||||
|
|
||||||
# PREP
|
# PREP
|
||||||
|
|
||||||
@ -338,10 +375,12 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
# in same way as .validate() or .run()
|
# in same way as .validate() or .run()
|
||||||
|
|
||||||
# Prep for liveimages - Obtain a password for signing rpm wrapped images
|
# Prep for liveimages - Obtain a password for signing rpm wrapped images
|
||||||
if ("signing_key_password_file" in compose.conf
|
if (
|
||||||
|
"signing_key_password_file" in compose.conf
|
||||||
and "signing_command" in compose.conf
|
and "signing_command" in compose.conf
|
||||||
and "%(signing_key_password)s" in compose.conf["signing_command"]
|
and "%(signing_key_password)s" in compose.conf["signing_command"]
|
||||||
and not liveimages_phase.skip()):
|
and not liveimages_phase.skip()
|
||||||
|
):
|
||||||
# TODO: Don't require key if signing is turned off
|
# TODO: Don't require key if signing is turned off
|
||||||
# Obtain signing key password
|
# Obtain signing key password
|
||||||
signing_key_password = None
|
signing_key_password = None
|
||||||
@ -357,7 +396,11 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
else:
|
else:
|
||||||
# Use text file with password
|
# Use text file with password
|
||||||
try:
|
try:
|
||||||
signing_key_password = open(compose.conf["signing_key_password_file"], "r").readline().rstrip('\n')
|
signing_key_password = (
|
||||||
|
open(compose.conf["signing_key_password_file"], "r")
|
||||||
|
.readline()
|
||||||
|
.rstrip("\n")
|
||||||
|
)
|
||||||
except IOError:
|
except IOError:
|
||||||
# Filename is not print intentionally in case someone puts password directly into the option
|
# Filename is not print intentionally in case someone puts password directly into the option
|
||||||
err_msg = "Cannot load password from file specified by 'signing_key_password_file' option"
|
err_msg = "Cannot load password from file specified by 'signing_key_password_file' option"
|
||||||
@ -388,7 +431,9 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
# write treeinfo before ISOs are created
|
# write treeinfo before ISOs are created
|
||||||
for variant in compose.get_variants():
|
for variant in compose.get_variants():
|
||||||
for arch in variant.arches + ["src"]:
|
for arch in variant.arches + ["src"]:
|
||||||
pungi.metadata.write_tree_info(compose, arch, variant, bi=buildinstall_phase)
|
pungi.metadata.write_tree_info(
|
||||||
|
compose, arch, variant, bi=buildinstall_phase
|
||||||
|
)
|
||||||
|
|
||||||
# write .discinfo and media.repo before ISOs are created
|
# write .discinfo and media.repo before ISOs are created
|
||||||
for variant in compose.get_variants():
|
for variant in compose.get_variants():
|
||||||
@ -441,17 +486,28 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
if compose.get_status() in [s.upper() for s in latest_link_status]:
|
if compose.get_status() in [s.upper() for s in latest_link_status]:
|
||||||
latest_link = True
|
latest_link = True
|
||||||
else:
|
else:
|
||||||
compose.log_warning("Compose status (%s) doesn't match with specified latest-link-status (%s), not create latest link."
|
compose.log_warning(
|
||||||
% (compose.get_status(), str(latest_link_status)))
|
"Compose status (%s) doesn't match with specified latest-link-status (%s), not create latest link."
|
||||||
|
% (compose.get_status(), str(latest_link_status))
|
||||||
|
)
|
||||||
|
|
||||||
if latest_link:
|
if latest_link:
|
||||||
compose_dir = os.path.basename(compose.topdir)
|
compose_dir = os.path.basename(compose.topdir)
|
||||||
if len(compose.conf["release_version"].split(".")) == 1:
|
if len(compose.conf["release_version"].split(".")) == 1:
|
||||||
symlink_name = "latest-%s-%s" % (compose.conf["release_short"], compose.conf["release_version"])
|
symlink_name = "latest-%s-%s" % (
|
||||||
|
compose.conf["release_short"],
|
||||||
|
compose.conf["release_version"],
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
symlink_name = "latest-%s-%s" % (compose.conf["release_short"], ".".join(compose.conf["release_version"].split(".")[:-1]))
|
symlink_name = "latest-%s-%s" % (
|
||||||
|
compose.conf["release_short"],
|
||||||
|
".".join(compose.conf["release_version"].split(".")[:-1]),
|
||||||
|
)
|
||||||
if compose.conf.get("base_product_name", ""):
|
if compose.conf.get("base_product_name", ""):
|
||||||
symlink_name += "-%s-%s" % (compose.conf["base_product_short"], compose.conf["base_product_version"])
|
symlink_name += "-%s-%s" % (
|
||||||
|
compose.conf["base_product_short"],
|
||||||
|
compose.conf["base_product_version"],
|
||||||
|
)
|
||||||
symlink = os.path.join(compose.topdir, "..", symlink_name)
|
symlink = os.path.join(compose.topdir, "..", symlink_name)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@ -471,8 +527,7 @@ def run_compose(compose, create_latest_link=True, latest_link_status=None):
|
|||||||
def sigterm_handler(signum, frame):
|
def sigterm_handler(signum, frame):
|
||||||
if COMPOSE:
|
if COMPOSE:
|
||||||
COMPOSE.log_error("Compose run failed: signal %s" % signum)
|
COMPOSE.log_error("Compose run failed: signal %s" % signum)
|
||||||
COMPOSE.log_error("Traceback:\n%s"
|
COMPOSE.log_error("Traceback:\n%s" % "\n".join(traceback.format_stack(frame)))
|
||||||
% '\n'.join(traceback.format_stack(frame)))
|
|
||||||
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
|
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
|
||||||
COMPOSE.write_status("TERMINATED")
|
COMPOSE.write_status("TERMINATED")
|
||||||
else:
|
else:
|
||||||
@ -495,6 +550,7 @@ def cli_main():
|
|||||||
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
|
COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
|
||||||
COMPOSE.write_status("DOOMED")
|
COMPOSE.write_status("DOOMED")
|
||||||
import kobo.tback
|
import kobo.tback
|
||||||
|
|
||||||
with open(tb_path, "wb") as f:
|
with open(tb_path, "wb") as f:
|
||||||
f.write(kobo.tback.Traceback().get_traceback())
|
f.write(kobo.tback.Traceback().get_traceback())
|
||||||
else:
|
else:
|
||||||
|
@@ -8,7 +8,7 @@ import sys


def main():
parser = argparse.ArgumentParser()
parser.add_argument("cmd")
opts = parser.parse_args()

data = json.load(sys.stdin)

@@ -39,40 +39,40 @@ def ts_log(msg):


def main():
parser = argparse.ArgumentParser()
parser.add_argument("cmd")
opts = parser.parse_args()

if opts.cmd != "ostree":
# Not an announcement of new ostree commit, nothing to do.
sys.exit()

try:
data = json.load(sys.stdin)
except ValueError:
print("Failed to decode data", file=sys.stderr)
sys.exit(1)

repo = data["local_repo_path"]
commit = data["commitid"]
if not commit:
print("No new commit was created, nothing will get signed.")
sys.exit(0)

path = "%s/objects/%s/%s.commitmeta" % (repo, commit[:2], commit[2:])

config = fedmsg.config.load_config()
config["active"] = True  # Connect out to a fedmsg-relay instance
config["cert_prefix"] = "releng"  # Use this cert.
fedmsg.init(**config)
topic = "compose.%s" % opts.cmd.replace("-", ".").lower()

count = 0
while not os.path.exists(path):
ts_log("Commit not signed yet, waiting...")
count += 1
if count >= 60:  # Repeat every 5 minutes
print("Repeating notification")
fedmsg.publish(topic=topic, modname="pungi", msg=data)
count = 0
time.sleep(5)
280 pungi/util.py

@@ -39,13 +39,27 @@ from productmd.common import get_major_version
DEBUG_PATTERNS = ["*-debuginfo", "*-debuginfo-*", "*-debugsource"]


def _doRunCommand(
command,
logger,
rundir="/tmp",
output=subprocess.PIPE,
error=subprocess.PIPE,
env=None,
):
"""Run a command and log the output. Error out if we get something on stderr"""

logger.info("Running %s" % subprocess.list2cmdline(command))

p1 = subprocess.Popen(
command,
cwd=rundir,
stdout=output,
stderr=error,
universal_newlines=True,
env=env,
close_fds=True,
)
(out, err) = p1.communicate()

if out:

@@ -54,7 +68,9 @@ def _doRunCommand(command, logger, rundir='/tmp', output=subprocess.PIPE, error=
if p1.returncode != 0:
logger.error("Got an error from %s" % command[0])
logger.error(err)
raise OSError(
"Got an error (%d) from %s: %s" % (p1.returncode, command[0], err)
)


def _link(local, target, logger, force=False):

@@ -72,7 +88,7 @@ def _link(local, target, logger, force=False):
os.link(local, target)
except OSError as e:
if e.errno != 18:  # EXDEV
logger.error("Got an error linking from cache: %s" % e)
raise OSError(e)

# Can't hardlink cross file systems

@@ -86,7 +102,7 @@ def _ensuredir(target, logger, force=False, clean=False):
# We have to check existance of a logger, as setting the logger could
# itself cause an issue.
def whoops(func, path, exc_info):
message = "Could not remove %s" % path
if logger:
logger.error(message)
else:

@@ -94,7 +110,7 @@ def _ensuredir(target, logger, force=False, clean=False):
sys.exit(1)

if os.path.exists(target) and not os.path.isdir(target):
message = "%s exists but is not a directory." % target
if logger:
logger.error(message)
else:

@@ -109,7 +125,7 @@ def _ensuredir(target, logger, force=False, clean=False):
elif force:
return
else:
message = "Directory %s already exists. Use --force to overwrite." % target
if logger:
logger.error(message)
else:

@@ -130,7 +146,7 @@ def _doCheckSum(path, hash, logger):
# Try to open the file, using binary flag.
try:
myfile = open(path, "rb")
except IOError as e:
logger.error("Could not open file %s: %s" % (path, e))
return False

@@ -138,13 +154,15 @@ def _doCheckSum(path, hash, logger):
# Loop through the file reading chunks at a time as to not
# put the entire file in memory. That would suck for DVDs
while True:
chunk = myfile.read(
8192
)  # magic number! Taking suggestions for better blocksize
if not chunk:
break  # we're done with the file
sum.update(chunk)
myfile.close()

return "%s:%s" % (hash, sum.hexdigest())


def makedirs(path, mode=0o775):

@@ -168,7 +186,10 @@ def explode_rpm_package(pkg_path, target_dir):
"""Explode a rpm package into target_dir."""
pkg_path = os.path.abspath(pkg_path)
makedirs(target_dir)
run(
"rpm2cpio %s | cpio -iuvmd && chmod -R a+rX ." % shlex_quote(pkg_path),
workdir=target_dir,
)


def pkg_is_rpm(pkg_obj):

@@ -232,15 +253,15 @@ def get_arch_variant_data(conf, var_name, arch, variant, keys=None):
def is_arch_multilib(conf, arch):
"""Check if at least one variant has multilib enabled on this variant."""
return bool(get_arch_variant_data(conf, "multilib", arch, None))


def _get_git_ref(fragment):
if fragment == "HEAD":
return fragment
if fragment.startswith("origin/"):
branch = fragment.split("/", 1)[1]
return "refs/heads/" + branch
return None

@@ -296,15 +317,15 @@ def resolve_git_url(url):
# Remove git+ prefix from scheme if present. This is for resolving only,
# the final result must use original scheme.
scheme = r.scheme.replace("git+", "")

baseurl = urllib.parse.urlunsplit((scheme, r.netloc, r.path, "", ""))
fragment = resolve_git_ref(baseurl, ref)

result = urllib.parse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))
if "?#" in url:
# The urllib library drops empty query string. This hack puts it back in.
result = result.replace("#", "?#")
return result

@@ -313,6 +334,7 @@ class GitUrlResolver(object):
URL with fragment describing reference, or url and refname. It will return
either url with changed fragment or just resolved ref.
"""

def __init__(self, offline=False):
self.offline = offline
self.cache = {}

@@ -373,7 +395,7 @@ def get_variant_data(conf, var_name, variant, keys=None):
def _apply_substitutions(compose, volid):
substitutions = compose.conf["volume_id_substitutions"].items()
# processing should start with the longest pattern, otherwise, we could
# unexpectedly replace a substring of that longest pattern
for k, v in sorted(substitutions, key=lambda x: len(x[0]), reverse=True):

@@ -381,8 +403,7 @@ def _apply_substitutions(compose, volid):
return volid


def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwargs):
"""Get ISO volume ID for arch and variant"""
if variant and variant.type == "addon":
# addons are part of parent variant media

@@ -398,13 +419,15 @@ def get_volid(compose, arch, variant=None, disc_type=False,
else:
release_short = compose.conf["release_short"]
release_version = compose.conf["release_version"]
release_is_layered = (
True if compose.conf.get("base_product_name", "") else False
)
base_product_short = compose.conf.get("base_product_short", "")
base_product_version = compose.conf.get("base_product_version", "")
variant_uid = variant and variant.uid or None

products = compose.conf["image_volid_formats"]
layered_products = compose.conf["image_volid_layered_product_formats"]

volid = None
if release_is_layered:

@@ -418,26 +441,32 @@ def get_volid(compose, arch, variant=None, disc_type=False,
if not variant_uid and "%(variant)s" in i:
continue
try:
args = get_format_substs(
compose,
variant=variant_uid,
release_short=release_short,
version=release_version,
arch=arch,
disc_type=disc_type or "",
base_product_short=base_product_short,
base_product_version=base_product_version,
**kwargs
)
volid = (i % args).format(**args)
except KeyError as err:
raise RuntimeError(
"Failed to create volume id: unknown format element: %s" % err
)
volid = _apply_substitutions(compose, volid)
if len(volid) <= 32:
break
tried.add(volid)

if volid and len(volid) > 32:
raise ValueError(
"Could not create volume ID longer than 32 bytes, options are %r",
sorted(tried, key=len),
)

if compose.conf["restricted_volid"]:
# Replace all non-alphanumeric characters and non-underscores) with

@@ -455,16 +484,22 @@ def get_file_size(path):
return os.path.getsize(path)


def find_old_compose(
old_compose_dirs,
release_short,
release_version,
release_type_suffix,
base_product_short=None,
base_product_version=None,
allowed_statuses=None,
):
allowed_statuses = allowed_statuses or ("FINISHED", "FINISHED_INCOMPLETE", "DOOMED")
composes = []

def _sortable(compose_id):
"""Convert ID to tuple where respin is an integer for proper sorting."""
try:
prefix, respin = compose_id.rsplit(".", 1)
return (prefix, int(respin))
except Exception:
return compose_id

@@ -486,7 +521,7 @@ def find_old_compose(old_compose_dirs, release_short, release_version,
if not i.startswith(pattern):
continue

suffix = i[len(pattern) :]
if len(suffix) < 2 or not suffix[1].isdigit():
# This covers the case where we are looking for -updates, but there
# is an updates-testing as well.

@@ -504,7 +539,7 @@ def find_old_compose(old_compose_dirs, release_short, release_version,
continue

try:
with open(status_path, "r") as f:
if f.read().strip() in allowed_statuses:
composes.append((_sortable(i), os.path.abspath(path)))
except:

@@ -526,7 +561,9 @@ def process_args(fmt, args):


@contextlib.contextmanager
def failable(
compose, can_fail, variant, arch, deliverable, subvariant=None, logger=None
):
"""If a deliverable can fail, log a message and go on as if it succeeded."""
if not logger:
logger = compose._logger

@@ -540,17 +577,21 @@ def failable(compose, can_fail, variant, arch, deliverable, subvariant=None, log
if not can_fail:
raise
else:
log_failed_task(
compose, variant, arch, deliverable, subvariant, logger=logger, exc=exc
)


def log_failed_task(
compose, variant, arch, deliverable, subvariant, logger=None, exc=None
):
logger = logger or compose._logger
msg = deliverable.replace("-", " ").capitalize()
compose.fail_deliverable(variant, arch, deliverable, subvariant)
ident = "variant %s, arch %s" % (variant.uid if variant else "None", arch)
if subvariant:
ident += ", subvariant %s" % subvariant
logger.error("[FAIL] %s (%s) failed, but going on anyway." % (msg, ident))
if exc:
logger.error(str(exc))
tb = traceback.format_exc()

@@ -559,7 +600,7 @@ def log_failed_task(compose, variant, arch, deliverable, subvariant, logger=None
def can_arch_fail(failable_arches, arch):
"""Check if `arch` is in `failable_arches` or `*` can fail."""
return "*" in failable_arches or arch in failable_arches


def get_format_substs(compose, **kwargs):

@@ -568,15 +609,15 @@ def get_format_substs(compose, **kwargs):
Any kwargs will be added as well.
"""
substs = {
"compose_id": compose.compose_id,
"release_short": compose.ci_base.release.short,
"version": compose.ci_base.release.version,
"date": compose.compose_date,
"respin": compose.compose_respin,
"type": compose.compose_type,
"type_suffix": compose.compose_type_suffix,
"label": compose.compose_label,
"label_major_version": compose.compose_label_major_version,
}
substs.update(kwargs)
return substs

@@ -603,7 +644,7 @@ def copy_all(src, dest):
"""
contents = os.listdir(src)
if not contents:
raise RuntimeError("Source directory %s is empty." % src)
makedirs(dest)
for item in contents:
source = os.path.join(src, item)

@@ -651,9 +692,9 @@ def levenshtein(a, b):
for j in range(1, len(b) + 1):
for i in range(1, len(a) + 1):
cost = 0 if a[i - 1] == b[j - 1] else 1
mat[j][i] = min(
mat[j - 1][i] + 1, mat[j][i - 1] + 1, mat[j - 1][i - 1] + cost
)

return mat[len(b)][len(a)]

@@ -661,10 +702,10 @@ def levenshtein(a, b):
@contextlib.contextmanager
def temp_dir(log=None, *args, **kwargs):
"""Create a temporary directory and ensure it's deleted."""
if kwargs.get("dir"):
# If we are supposed to create the temp dir in a particular location,
# ensure the location already exists.
makedirs(kwargs["dir"])
dir = tempfile.mkdtemp(*args, **kwargs)
try:
yield dir

@@ -674,7 +715,7 @@ def temp_dir(log=None, *args, **kwargs):
except OSError as exc:
# Okay, we failed to delete temporary dir.
if log:
log.warning("Error removing %s: %s", dir, exc.strerror)


def run_unmount_cmd(cmd, max_retries=10, path=None, logger=None):

@@ -687,33 +728,41 @@ def run_unmount_cmd(cmd, max_retries=10, path=None, logger=None):
printed in case of failure.
"""
for i in range(max_retries):
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True
)
out, err = proc.communicate()
if proc.returncode == 0:
# We were successful
return
if "Device or resource busy" not in err:
raise RuntimeError("Unhandled error when running %r: %r" % (cmd, err))
time.sleep(i)
# Still busy, there's something wrong.
if path and logger:
commands = [
["ls", "-lA", path],
["fuser", "-vm", path],
["lsof", "+D", path],
]
for c in commands:
try:
proc = subprocess.Popen(
c,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
)
out, _ = proc.communicate()
logger.debug(
"`%s` exited with %s and following output:\n%s",
" ".join(c),
proc.returncode,
out,
)
except OSError:
logger.debug("`%s` command not available for debugging", " ".join(c))
raise RuntimeError("Failed to run %r: Device or resource busy." % cmd)


def translate_path_raw(mapping, path):

@@ -721,7 +770,7 @@ def translate_path_raw(mapping, path):
for prefix, newvalue in mapping:
prefix = os.path.normpath(prefix)
# Strip trailing slashes: the prefix has them stripped by `normpath`.
newvalue = newvalue.rstrip("/")
if normpath.startswith(prefix):
# We can't call os.path.normpath on result since it is not actually
# a path - http:// would get changed to http:/ and so on.

@@ -739,7 +788,7 @@ def translate_path(compose, path):
return translate_path_raw(mapping, path)


def get_repo_url(compose, repo, arch="$basearch"):
"""
Convert repo to repo URL.

@@ -751,25 +800,27 @@ def get_repo_url(compose, repo, arch='$basearch'):
"""
if isinstance(repo, dict):
try:
repo = repo["baseurl"]
except KeyError:
raise RuntimeError("Baseurl is required in repo dict %s" % str(repo))
if repo.startswith("/"):
# It's an absolute path, translate it and return it
return translate_path(compose, repo)
if "://" not in repo:
# this is a variant name
if compose is not None:
v = compose.all_variants.get(repo)
if not v:
raise RuntimeError("There is no variant %s to get repo from." % repo)
else:
return None
repo = translate_path(
compose, compose.paths.compose.repository(arch, v, create_dir=False)
)
return repo


def get_repo_urls(compose, repos, arch="$basearch", logger=None):
"""
Convert repos to a list of repo URLs.

@@ -782,7 +833,10 @@ def get_repo_urls(compose, repos, arch='$basearch', logger=None):
repo = get_repo_url(compose, repo, arch=arch)
if repo is None:
if logger:
logger.log_warning(
"Variant-type source repository is deprecated and will be ignored during 'OSTreeInstaller' phase: %s"
% (repo)
)
else:
urls.append(repo)
return urls

@@ -792,8 +846,8 @@ def _translate_url_to_repo_id(url):
"""
Translate url to valid repo id by replacing any invalid char to '_'.
"""
_REPOID_CHARS = string.ascii_letters + string.digits + "-_.:"
return "".join([s if s in list(_REPOID_CHARS) else "_" for s in url])


def get_repo_dict(repo):

@@ -809,23 +863,23 @@ def get_repo_dict(repo):
"""
repo_dict = {}
if isinstance(repo, dict):
url = repo["baseurl"]
name = repo.get("name", None)
if "://" in url:
if name is None:
name = _translate_url_to_repo_id(url)
else:
# url is variant uid - this possibility is now discontinued
return {}
repo["name"] = name
repo["baseurl"] = url
return repo
else:
# repo is normal url or variant uid
repo_dict = {}
if "://" in repo:
repo_dict["name"] = _translate_url_to_repo_id(repo)
repo_dict["baseurl"] = repo
else:
return {}
return repo_dict

@@ -842,7 +896,10 @@ def get_repo_dicts(repos, logger=None):
repo_dict = get_repo_dict(repo)
if repo_dict == {}:
if logger:
logger.log_warning(
"Variant-type source repository is deprecated and will be ignored during 'OSTree' phase: %s"
% (repo)
)
else:
repo_dicts.append(repo_dict)
return repo_dicts

@@ -852,19 +909,21 @@ def version_generator(compose, gen):
"""If ``gen`` is a known generator, create a value. Otherwise return
the argument value unchanged.
"""
if gen == "!OSTREE_VERSION_FROM_LABEL_DATE_TYPE_RESPIN":
return "%s.%s" % (compose.image_version, compose.image_release)
elif gen == "!RELEASE_FROM_LABEL_DATE_TYPE_RESPIN":
return compose.image_release
elif gen == "!RELEASE_FROM_DATE_RESPIN":
return "%s.%s" % (compose.compose_date, compose.compose_respin)
elif gen == "!VERSION_FROM_VERSION_DATE_RESPIN":
return "%s.%s.%s" % (
compose.ci_base.release.version,
compose.compose_date,
compose.compose_respin,
)
elif gen == "!VERSION_FROM_VERSION":
return "%s" % (compose.ci_base.release.version)
elif gen and gen[0] == "!":
raise RuntimeError("Unknown version generator '%s'" % gen)
return gen

@@ -873,6 +932,7 @@ def retry(timeout=120, interval=30, wait_on=Exception):
""" A decorator that allows to retry a section of code until success or
timeout.
"""

def wrapper(function):
@functools.wraps(function)
def inner(*args, **kwargs):

@@ -884,13 +944,15 @@ def retry(timeout=120, interval=30, wait_on=Exception):
return function(*args, **kwargs)
except wait_on:
time.sleep(interval)

return inner

return wrapper


@retry(wait_on=RuntimeError)
def git_ls_remote(baseurl, ref):
return run(["git", "ls-remote", baseurl, ref], universal_newlines=True)


def get_tz_offset():
@ -41,12 +41,14 @@ if sys.version_info[:2] < (2, 7):
|
|||||||
xml.dom.minidom.Element = Element
|
xml.dom.minidom.Element = Element
|
||||||
|
|
||||||
|
|
||||||
TYPE_MAPPING = collections.OrderedDict([
|
TYPE_MAPPING = collections.OrderedDict(
|
||||||
(libcomps.PACKAGE_TYPE_MANDATORY, 'mandatory'),
|
[
|
||||||
(libcomps.PACKAGE_TYPE_DEFAULT, 'default'),
|
(libcomps.PACKAGE_TYPE_MANDATORY, "mandatory"),
|
||||||
(libcomps.PACKAGE_TYPE_OPTIONAL, 'optional'),
|
(libcomps.PACKAGE_TYPE_DEFAULT, "default"),
|
||||||
(libcomps.PACKAGE_TYPE_CONDITIONAL, 'conditional'),
|
(libcomps.PACKAGE_TYPE_OPTIONAL, "optional"),
|
||||||
])
|
(libcomps.PACKAGE_TYPE_CONDITIONAL, "conditional"),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class CompsValidationError(ValueError):
|
class CompsValidationError(ValueError):
|
||||||
@ -89,10 +91,13 @@ class CompsFilter(object):
|
|||||||
If only_arch is set, then only packages for the specified arch are preserved.
|
If only_arch is set, then only packages for the specified arch are preserved.
|
||||||
Multiple arches separated by comma can be specified in the XML.
|
Multiple arches separated by comma can be specified in the XML.
|
||||||
"""
|
"""
|
||||||
self._filter_elements_by_attr("/comps/group/packagelist/packagereq", 'arch', arch, only_arch)
|
self._filter_elements_by_attr(
|
||||||
|
"/comps/group/packagelist/packagereq", "arch", arch, only_arch
|
||||||
|
)
|
||||||
if variant:
|
if variant:
|
||||||
self._filter_elements_by_attr("/comps/group/packagelist/packagereq",
|
self._filter_elements_by_attr(
|
||||||
'variant', variant, only_arch)
|
"/comps/group/packagelist/packagereq", "variant", variant, only_arch
|
||||||
|
)
|
||||||
|
|
||||||
def filter_groups(self, arch, variant, only_arch=False):
|
def filter_groups(self, arch, variant, only_arch=False):
|
||||||
"""
|
"""
|
||||||
@ -100,9 +105,9 @@ class CompsFilter(object):
|
|||||||
If only_arch is set, then only groups for the specified arch are preserved.
|
If only_arch is set, then only groups for the specified arch are preserved.
|
||||||
Multiple arches separated by comma can be specified in the XML.
|
Multiple arches separated by comma can be specified in the XML.
|
||||||
"""
|
"""
|
||||||
self._filter_elements_by_attr("/comps/group", 'arch', arch, only_arch)
|
self._filter_elements_by_attr("/comps/group", "arch", arch, only_arch)
|
||||||
if variant:
|
if variant:
|
||||||
self._filter_elements_by_attr("/comps/group", 'variant', variant, only_arch)
|
self._filter_elements_by_attr("/comps/group", "variant", variant, only_arch)
|
||||||
|
|
||||||
def filter_environments(self, arch, variant, only_arch=False):
|
def filter_environments(self, arch, variant, only_arch=False):
|
||||||
"""
|
"""
|
||||||
@ -110,9 +115,11 @@ class CompsFilter(object):
|
|||||||
If only_arch is set, then only environments for the specified arch are preserved.
|
If only_arch is set, then only environments for the specified arch are preserved.
|
||||||
Multiple arches separated by comma can be specified in the XML.
|
Multiple arches separated by comma can be specified in the XML.
|
||||||
"""
|
"""
|
||||||
self._filter_elements_by_attr("/comps/environment", 'arch', arch, only_arch)
|
self._filter_elements_by_attr("/comps/environment", "arch", arch, only_arch)
|
||||||
if variant:
|
if variant:
|
||||||
self._filter_elements_by_attr("/comps/environment", 'variant', variant, only_arch)
|
self._filter_elements_by_attr(
|
||||||
|
"/comps/environment", "variant", variant, only_arch
|
||||||
|
)
|
||||||
|
|
||||||
def filter_category_groups(self):
|
def filter_category_groups(self):
|
||||||
"""
|
"""
|
||||||
@ -196,7 +203,12 @@ class CompsFilter(object):
|
|||||||
i.getparent().remove(i)
|
i.getparent().remove(i)
|
||||||
|
|
||||||
def write(self, file_obj):
|
def write(self, file_obj):
|
||||||
self.tree.write(file_obj, pretty_print=self.reindent, xml_declaration=True, encoding=self.encoding)
|
self.tree.write(
|
||||||
|
file_obj,
|
||||||
|
pretty_print=self.reindent,
|
||||||
|
xml_declaration=True,
|
||||||
|
encoding=self.encoding,
|
||||||
|
)
|
||||||
file_obj.write(b"\n")
|
file_obj.write(b"\n")
|
||||||
|
|
||||||
def cleanup(self, keep_groups=[], lookaside_groups=[]):
|
def cleanup(self, keep_groups=[], lookaside_groups=[]):
|
||||||
@ -235,7 +247,7 @@ class CompsWrapper(object):
|
|||||||
for grp in self.comps.groups:
|
for grp in self.comps.groups:
|
||||||
if grp.id == group:
|
if grp.id == group:
|
||||||
return [pkg.name for pkg in grp.packages]
|
return [pkg.name for pkg in grp.packages]
|
||||||
raise KeyError('No such group %r' % group)
|
raise KeyError("No such group %r" % group)
|
||||||
|
|
||||||
def get_langpacks(self):
|
def get_langpacks(self):
|
||||||
langpacks = {}
|
langpacks = {}
|
||||||
@ -273,11 +285,13 @@ class CompsWrapper(object):
|
|||||||
|
|
||||||
def generate_comps(self):
|
def generate_comps(self):
|
||||||
impl = xml.dom.minidom.getDOMImplementation()
|
impl = xml.dom.minidom.getDOMImplementation()
|
||||||
doctype = impl.createDocumentType("comps", "-//Red Hat, Inc.//DTD Comps info//EN", "comps.dtd")
|
doctype = impl.createDocumentType(
|
||||||
|
"comps", "-//Red Hat, Inc.//DTD Comps info//EN", "comps.dtd"
|
||||||
|
)
|
||||||
doc = impl.createDocument(None, "comps", doctype)
|
doc = impl.createDocument(None, "comps", doctype)
|
||||||
msg_elem = doc.documentElement
|
msg_elem = doc.documentElement
|
||||||
|
|
||||||
for group in sorted(self.comps.groups, key=attrgetter('id')):
|
for group in sorted(self.comps.groups, key=attrgetter("id")):
|
||||||
group_node = doc.createElement("group")
|
group_node = doc.createElement("group")
|
||||||
msg_elem.appendChild(group_node)
|
msg_elem.appendChild(group_node)
|
||||||
|
|
||||||
@ -294,13 +308,14 @@ class CompsWrapper(object):
|
|||||||
for pkg in group.packages:
|
for pkg in group.packages:
|
||||||
if pkg.type == libcomps.PACKAGE_TYPE_UNKNOWN:
|
if pkg.type == libcomps.PACKAGE_TYPE_UNKNOWN:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
'Failed to process comps file. Package %s in group %s has unknown type'
|
"Failed to process comps file. Package %s in group %s has unknown type"
|
||||||
% (pkg.name, group.id))
|
% (pkg.name, group.id)
|
||||||
|
)
|
||||||
|
|
||||||
packages_by_type[TYPE_MAPPING[pkg.type]].append(pkg)
|
packages_by_type[TYPE_MAPPING[pkg.type]].append(pkg)
|
||||||
|
|
||||||
for type_name in TYPE_MAPPING.values():
|
for type_name in TYPE_MAPPING.values():
|
||||||
for pkg in sorted(packages_by_type[type_name], key=attrgetter('name')):
|
for pkg in sorted(packages_by_type[type_name], key=attrgetter("name")):
|
||||||
kwargs = {"type": type_name}
|
kwargs = {"type": type_name}
|
||||||
if type_name == "conditional":
|
if type_name == "conditional":
|
||||||
kwargs["requires"] = pkg.requires
|
kwargs["requires"] = pkg.requires
|
||||||
@ -309,7 +324,9 @@ class CompsWrapper(object):
|
|||||||
group_node.appendChild(packagelist)
|
group_node.appendChild(packagelist)
|
||||||
|
|
||||||
for category in self.comps.categories:
|
for category in self.comps.categories:
|
||||||
groups = set(x.name for x in category.group_ids) & set(self.get_comps_groups())
|
groups = set(x.name for x in category.group_ids) & set(
|
||||||
|
self.get_comps_groups()
|
||||||
|
)
|
||||||
if not groups:
|
if not groups:
|
||||||
continue
|
continue
|
||||||
cat_node = doc.createElement("category")
|
cat_node = doc.createElement("category")
|
||||||
@ -322,7 +339,7 @@ class CompsWrapper(object):
|
|||||||
|
|
||||||
append_grouplist(doc, cat_node, groups)
|
append_grouplist(doc, cat_node, groups)
|
||||||
|
|
||||||
for environment in sorted(self.comps.environments, key=attrgetter('id')):
|
for environment in sorted(self.comps.environments, key=attrgetter("id")):
|
||||||
groups = set(x.name for x in environment.group_ids)
|
groups = set(x.name for x in environment.group_ids)
|
||||||
if not groups:
|
if not groups:
|
||||||
continue
|
continue
|
||||||
@ -337,14 +354,25 @@ class CompsWrapper(object):
|
|||||||
append_grouplist(doc, env_node, groups)
|
append_grouplist(doc, env_node, groups)
|
||||||
|
|
||||||
if environment.option_ids:
|
if environment.option_ids:
|
||||||
append_grouplist(doc, env_node, (x.name for x in environment.option_ids), "optionlist")
|
append_grouplist(
|
||||||
|
doc,
|
||||||
|
env_node,
|
||||||
|
(x.name for x in environment.option_ids),
|
||||||
|
"optionlist",
|
||||||
|
)
|
||||||
|
|
||||||
if self.comps.langpacks:
|
if self.comps.langpacks:
|
||||||
lang_node = doc.createElement("langpacks")
|
lang_node = doc.createElement("langpacks")
|
||||||
msg_elem.appendChild(lang_node)
|
msg_elem.appendChild(lang_node)
|
||||||
|
|
||||||
for name in sorted(self.comps.langpacks):
|
for name in sorted(self.comps.langpacks):
|
||||||
append(doc, lang_node, "match", name=name, install=self.comps.langpacks[name])
|
append(
|
||||||
|
doc,
|
||||||
|
lang_node,
|
||||||
|
"match",
|
||||||
|
name=name,
|
||||||
|
install=self.comps.langpacks[name],
|
||||||
|
)
|
||||||
|
|
||||||
return doc
|
return doc
|
||||||
|
|
||||||
@ -446,7 +474,7 @@ def append_common_info(doc, parent, obj, force_description=False):
|
|||||||
append(doc, parent, "name", text, lang=lang)
|
append(doc, parent, "name", text, lang=lang)
|
||||||
|
|
||||||
if obj.desc or force_description:
|
if obj.desc or force_description:
|
||||||
append(doc, parent, "description", obj.desc or '')
|
append(doc, parent, "description", obj.desc or "")
|
||||||
|
|
||||||
for lang in sorted(obj.desc_by_lang):
|
for lang in sorted(obj.desc_by_lang):
|
||||||
text = obj.desc_by_lang[lang]
|
text = obj.desc_by_lang[lang]
|
||||||
|
@ -28,13 +28,37 @@ class CreaterepoWrapper(object):
|
|||||||
self.mergerepo = "mergerepo"
|
self.mergerepo = "mergerepo"
|
||||||
self.modifyrepo = "modifyrepo"
|
self.modifyrepo = "modifyrepo"
|
||||||
|
|
||||||
def get_createrepo_cmd(self, directory, baseurl=None, outputdir=None, basedir=None, excludes=None,
|
def get_createrepo_cmd(
|
||||||
pkglist=None, groupfile=None, cachedir=None, update=True,
|
self,
|
||||||
update_md_path=None, skip_stat=False, checkts=False, split=False,
|
directory,
|
||||||
pretty=True, database=True, checksum=None, unique_md_filenames=True,
|
baseurl=None,
|
||||||
distro=None, content=None, repo=None, revision=None, deltas=False,
|
outputdir=None,
|
||||||
oldpackagedirs=None, num_deltas=None, workers=None, use_xz=False,
|
basedir=None,
|
||||||
compress_type=None, extra_args=None):
|
excludes=None,
|
||||||
|
pkglist=None,
|
||||||
|
groupfile=None,
|
||||||
|
cachedir=None,
|
||||||
|
update=True,
|
||||||
|
update_md_path=None,
|
||||||
|
skip_stat=False,
|
||||||
|
checkts=False,
|
||||||
|
split=False,
|
||||||
|
pretty=True,
|
||||||
|
database=True,
|
||||||
|
checksum=None,
|
||||||
|
unique_md_filenames=True,
|
||||||
|
distro=None,
|
||||||
|
content=None,
|
||||||
|
repo=None,
|
||||||
|
revision=None,
|
||||||
|
deltas=False,
|
||||||
|
oldpackagedirs=None,
|
||||||
|
num_deltas=None,
|
||||||
|
workers=None,
|
||||||
|
use_xz=False,
|
||||||
|
compress_type=None,
|
||||||
|
extra_args=None,
|
||||||
|
):
|
||||||
# groupfile = /path/to/comps.xml
|
# groupfile = /path/to/comps.xml
|
||||||
|
|
||||||
cmd = [self.createrepo, directory]
|
cmd = [self.createrepo, directory]
|
||||||
@ -129,7 +153,15 @@ class CreaterepoWrapper(object):
|
|||||||
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
def get_mergerepo_cmd(self, outputdir, repos, database=True, pkglist=None, nogroups=False, noupdateinfo=None):
|
def get_mergerepo_cmd(
|
||||||
|
self,
|
||||||
|
outputdir,
|
||||||
|
repos,
|
||||||
|
database=True,
|
||||||
|
pkglist=None,
|
||||||
|
nogroups=False,
|
||||||
|
noupdateinfo=None,
|
||||||
|
):
|
||||||
cmd = [self.mergerepo]
|
cmd = [self.mergerepo]
|
||||||
|
|
||||||
cmd.append("--outputdir=%s" % outputdir)
|
cmd.append("--outputdir=%s" % outputdir)
|
||||||
@ -156,7 +188,9 @@ class CreaterepoWrapper(object):
|
|||||||
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
def get_modifyrepo_cmd(self, repo_path, file_path, mdtype=None, compress_type=None, remove=False):
|
def get_modifyrepo_cmd(
|
||||||
|
self, repo_path, file_path, mdtype=None, compress_type=None, remove=False
|
||||||
|
):
|
||||||
cmd = [self.modifyrepo]
|
cmd = [self.modifyrepo]
|
||||||
|
|
||||||
cmd.append(file_path)
|
cmd.append(file_path)
|
||||||
|
@ -26,12 +26,7 @@ Pungi).
|
|||||||
|
|
||||||
|
|
||||||
def get_cmd(
|
def get_cmd(
|
||||||
conf_file,
|
conf_file, arch, repos, lookasides, platform=None, filter_packages=None,
|
||||||
arch,
|
|
||||||
repos,
|
|
||||||
lookasides,
|
|
||||||
platform=None,
|
|
||||||
filter_packages=None,
|
|
||||||
):
|
):
|
||||||
cmd = ["fus", "--verbose", "--arch", arch]
|
cmd = ["fus", "--verbose", "--arch", arch]
|
||||||
|
|
||||||
@ -64,7 +59,7 @@ def write_config(conf_file, modules, packages):
|
|||||||
def _prep_path(path):
|
def _prep_path(path):
|
||||||
"""Strip file:// from the path if present."""
|
"""Strip file:// from the path if present."""
|
||||||
if path.startswith("file://"):
|
if path.startswith("file://"):
|
||||||
return path[len("file://"):]
|
return path[len("file://") :]
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
@ -30,76 +30,88 @@ def get_boot_options(arch, createfrom, efi=True, hfs_compat=True):
|
|||||||
result = []
|
result = []
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if arch in ("aarch64", ):
|
if arch in ("aarch64",):
|
||||||
result = [
|
result = [
|
||||||
'-eltorito-alt-boot',
|
"-eltorito-alt-boot",
|
||||||
'-e', 'images/efiboot.img',
|
"-e",
|
||||||
'-no-emul-boot',
|
"images/efiboot.img",
|
||||||
|
"-no-emul-boot",
|
||||||
]
|
]
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if arch in ("i386", "i686", "x86_64"):
|
if arch in ("i386", "i686", "x86_64"):
|
||||||
result = [
|
result = [
|
||||||
'-b', 'isolinux/isolinux.bin',
|
"-b",
|
||||||
'-c', 'isolinux/boot.cat',
|
"isolinux/isolinux.bin",
|
||||||
'-no-emul-boot',
|
"-c",
|
||||||
'-boot-load-size', '4',
|
"isolinux/boot.cat",
|
||||||
'-boot-info-table',
|
"-no-emul-boot",
|
||||||
|
"-boot-load-size",
|
||||||
|
"4",
|
||||||
|
"-boot-info-table",
|
||||||
]
|
]
|
||||||
|
|
||||||
# EFI args
|
# EFI args
|
||||||
if arch == "x86_64":
|
if arch == "x86_64":
|
||||||
result.extend([
|
result.extend(
|
||||||
'-eltorito-alt-boot',
|
["-eltorito-alt-boot", "-e", "images/efiboot.img", "-no-emul-boot"]
|
||||||
'-e', 'images/efiboot.img',
|
)
|
||||||
'-no-emul-boot',
|
|
||||||
])
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if arch == "ia64":
|
if arch == "ia64":
|
||||||
result = [
|
result = [
|
||||||
'-b', 'images/boot.img',
|
"-b",
|
||||||
'-no-emul-boot',
|
"images/boot.img",
|
||||||
|
"-no-emul-boot",
|
||||||
]
|
]
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if arch in ("ppc", "ppc64") or (arch == "ppc64le" and hfs_compat):
|
if arch in ("ppc", "ppc64") or (arch == "ppc64le" and hfs_compat):
|
||||||
result = [
|
result = [
|
||||||
'-part',
|
"-part",
|
||||||
'-hfs',
|
"-hfs",
|
||||||
'-r',
|
"-r",
|
||||||
'-l',
|
"-l",
|
||||||
'-sysid', 'PPC',
|
"-sysid",
|
||||||
'-no-desktop',
|
"PPC",
|
||||||
'-allow-multidot',
|
"-no-desktop",
|
||||||
'-chrp-boot',
|
"-allow-multidot",
|
||||||
"-map", os.path.join(createfrom, 'mapping'), # -map %s/ppc/mapping
|
"-chrp-boot",
|
||||||
'-hfs-bless', "/ppc/mac", # must be the last
|
"-map",
|
||||||
|
os.path.join(createfrom, "mapping"), # -map %s/ppc/mapping
|
||||||
|
"-hfs-bless",
|
||||||
|
"/ppc/mac", # must be the last
|
||||||
]
|
]
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if arch == "ppc64le" and not hfs_compat:
|
if arch == "ppc64le" and not hfs_compat:
|
||||||
result = [
|
result = [
|
||||||
'-r',
|
"-r",
|
||||||
'-l',
|
"-l",
|
||||||
'-sysid', 'PPC',
|
"-sysid",
|
||||||
'-chrp-boot',
|
"PPC",
|
||||||
|
"-chrp-boot",
|
||||||
]
|
]
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if arch == "sparc":
|
if arch == "sparc":
|
||||||
result = [
|
result = [
|
||||||
'-G', '/boot/isofs.b',
|
"-G",
|
||||||
'-B', '...',
|
"/boot/isofs.b",
|
||||||
'-s', '/boot/silo.conf',
|
"-B",
|
||||||
'-sparc-label', '"sparc"',
|
"...",
|
||||||
|
"-s",
|
||||||
|
"/boot/silo.conf",
|
||||||
|
"-sparc-label",
|
||||||
|
'"sparc"',
|
||||||
]
|
]
|
||||||
return result
|
return result
|
||||||
|
|
||||||
if arch in ("s390", "s390x"):
|
if arch in ("s390", "s390x"):
|
||||||
result = [
|
result = [
|
||||||
'-eltorito-boot', 'images/cdboot.img',
|
"-eltorito-boot",
|
||||||
'-no-emul-boot',
|
"images/cdboot.img",
|
||||||
|
"-no-emul-boot",
|
||||||
]
|
]
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@@ -122,7 +134,18 @@ def _truncate_volid(volid):
    return volid


def get_mkisofs_cmd(
    iso,
    paths,
    appid=None,
    volid=None,
    volset=None,
    exclude=None,
    verbose=False,
    boot_args=None,
    input_charset="utf-8",
    graft_points=None,
):
    # following options are always enabled
    untranslated_filenames = True
    translation_table = True
@@ -201,7 +224,7 @@ def get_checkisomd5_data(iso_path, logger=None):
    retcode, output = run(cmd, universal_newlines=True)
    items = [line.strip().rsplit(":", 1) for line in output.splitlines()]
    items = dict([(k, v.strip()) for k, v in items])
    md5 = items.get(iso_path, "")
    if len(md5) != 32:
        # We have seen cases where the command finished successfully, but
        # returned garbage value. We need to handle it, otherwise there would
@@ -209,8 +232,10 @@ def get_checkisomd5_data(iso_path, logger=None):
        # This only logs information about the problem and leaves the hash
        # empty, which is valid from productmd point of view.
        if logger:
            logger.critical("Implanted MD5 in %s is not valid: %r", iso_path, md5)
            logger.critical(
                "Ran command %r; exit code %r; output %r", cmd, retcode, output
            )
        return None
    return items

@@ -231,7 +256,9 @@ def get_isohybrid_cmd(iso_path, arch):

def get_manifest_cmd(iso_name):
    return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
        shlex_quote(iso_name),
        shlex_quote(iso_name),
    )


def get_volume_id(path):
@@ -289,7 +316,7 @@ def _paths_from_list(root, paths):
    result = {}
    for i in paths:
        i = os.path.normpath(os.path.join(root, i))
        key = i[len(root) :]
        result[key] = i
    return result

@@ -315,7 +342,9 @@ def _scan_tree(path):
def _merge_trees(tree1, tree2, exclusive=False):
    # tree2 has higher priority
    result = tree2.copy()
    all_dirs = set(
        [os.path.dirname(i).rstrip("/") for i in result if os.path.dirname(i) != ""]
    )

    for i in tree1:
        dn = os.path.dirname(i)
@@ -408,14 +437,18 @@ def mount(image, logger=None, use_guestmount=True):
    The yielded path will only be valid in the with block and is removed once
    the image is unmounted.
    """
    with util.temp_dir(prefix="iso-mount-") as mount_dir:
        ret, __ = run(["which", "guestmount"], can_fail=True)
        # return code 0 means that guestmount is available
        guestmount_available = use_guestmount and not bool(ret)
        if guestmount_available:
            # use guestmount to mount the image, which doesn't require root privileges
            # LIBGUESTFS_BACKEND=direct: running qemu directly without libvirt
            env = {
                "LIBGUESTFS_BACKEND": "direct",
                "LIBGUESTFS_DEBUG": "1",
                "LIBGUESTFS_TRACE": "1",
            }
            cmd = ["guestmount", "-a", image, "-m", "/dev/sda", mount_dir]
            # guestmount caches files for faster mounting. However,
            # systemd-tmpfiles is cleaning it up if the files have not been
@@ -446,13 +479,14 @@ def mount(image, logger=None, use_guestmount=True):
        if ret != 0:
            # The mount command failed, something is wrong. Log the output and raise an exception.
            if logger:
                logger.error(
                    "Command %s exited with %s and output:\n%s" % (cmd, ret, out)
                )
            raise RuntimeError("Failed to mount %s" % image)
        try:
            yield mount_dir
        finally:
            if guestmount_available:
                util.run_unmount_cmd(["fusermount", "-u", mount_dir], path=mount_dir)
            else:
                util.run_unmount_cmd(["umount", mount_dir], path=mount_dir)
@@ -21,7 +21,9 @@ from kobo.shortcuts import force_list


class JigdoWrapper(kobo.log.LoggingBase):
    def get_jigdo_cmd(
        self, image, files, output_dir, cache=None, no_servers=False, report=None
    ):
        """
        files: [{"path", "label", "uri"}]
        """
@@ -30,7 +30,7 @@ from .. import util
from ..arch_utils import getBaseArch


KOJI_BUILD_DELETED = koji.BUILD_STATES["DELETED"]


class KojiWrapper(object):
@@ -41,38 +41,65 @@ class KojiWrapper(object):
        with self.lock:
            self.koji_module = koji.get_profile_module(profile)
            session_opts = {}
            for key in (
                "krbservice",
                "timeout",
                "keepalive",
                "max_retries",
                "retry_interval",
                "anon_retry",
                "offline_retry",
                "offline_retry_interval",
                "debug",
                "debug_xmlrpc",
                "krb_rdns",
                "serverca",
                "use_fast_upload",
            ):
                value = getattr(self.koji_module.config, key, None)
                if value is not None:
                    session_opts[key] = value
            self.koji_proxy = koji.ClientSession(
                self.koji_module.config.server, session_opts
            )

    def login(self):
        """Authenticate to the hub."""
        auth_type = self.koji_module.config.authtype
        if auth_type == "ssl" or (
            os.path.isfile(os.path.expanduser(self.koji_module.config.cert))
            and auth_type is None
        ):
            self.koji_proxy.ssl_login(
                os.path.expanduser(self.koji_module.config.cert),
                os.path.expanduser(self.koji_module.config.ca),
                os.path.expanduser(self.koji_module.config.serverca),
            )
        elif auth_type == "kerberos":
            self.koji_proxy.krb_login(
                getattr(self.koji_module.config, "principal", None),
                getattr(self.koji_module.config, "keytab", None),
            )
        else:
            raise RuntimeError("Unsupported authentication type in Koji")

    def _get_cmd(self, *args):
        return ["koji", "--profile=%s" % self.profile] + list(args)

    def get_runroot_cmd(
        self,
        target,
        arch,
        command,
        quiet=False,
        use_shell=True,
        channel=None,
        packages=None,
        mounts=None,
        weight=None,
        new_chroot=False,
        chown_paths=None,
    ):
        cmd = self._get_cmd("runroot", "--nowait", "--task-id")

        if quiet:
@@ -111,7 +138,9 @@ class KojiWrapper(object):
        command = " ".join([shlex_quote(i) for i in command])

        # HACK: remove rpmdb and yum cache
        command = (
            "rm -f /var/lib/rpm/__db*; rm -rf /var/cache/yum/*; set -x; " + command
        )

        if chown_paths:
            paths = " ".join(shlex_quote(pth) for pth in chown_paths)
@@ -124,8 +153,16 @@ class KojiWrapper(object):
        return cmd

    def get_pungi_buildinstall_cmd(
        self,
        target,
        arch,
        args,
        channel=None,
        packages=None,
        mounts=None,
        weight=None,
        chown_uid=None,
    ):
        cmd = self._get_cmd("pungi-buildinstall", "--nowait", "--task-id")

        if channel:
@@ -171,10 +208,10 @@ class KojiWrapper(object):
        If we are authenticated with a keytab, we need a fresh credentials
        cache to avoid possible race condition.
        """
        if getattr(self.koji_module.config, "keytab", None):
            with util.temp_dir(prefix="krb_ccache") as tempdir:
                env = os.environ.copy()
                env["KRB5CCNAME"] = "DIR:%s" % tempdir
                yield env
        else:
            yield None
@@ -188,11 +225,17 @@ class KojiWrapper(object):
        """
        task_id = None
        with self.get_koji_cmd_env() as env:
            retcode, output = run(
                command,
                can_fail=True,
                logfile=log_file,
                show_cmd=True,
                env=env,
                universal_newlines=True,
            )

            first_line = output.splitlines()[0]
            match = re.search(r"^(\d+)$", first_line)
            if not match:
                raise RuntimeError(
                    "Could not find task ID in output. Command '%s' returned '%s'."
@@ -209,7 +252,9 @@ class KojiWrapper(object):
            "task_id": task_id,
        }

    def get_image_build_cmd(
        self, config_options, conf_file_dest, wait=True, scratch=False
    ):
        """
        @param config_options
        @param conf_file_dest - a destination in compose workdir for the conf file to be written
@@ -219,14 +264,27 @@ class KojiWrapper(object):
        # Usage: koji image-build [options] <name> <version> <target> <install-tree-url> <arch> [<arch>...]
        sub_command = "image-build"
        # The minimum set of options
        min_options = (
            "name",
            "version",
            "target",
            "install_tree",
            "arches",
            "format",
            "kickstart",
            "ksurl",
            "distro",
        )
        assert set(min_options).issubset(set(config_options["image-build"].keys())), (
            "image-build requires at least %s got '%s'"
            % (", ".join(min_options), config_options)
        )
        cfg_parser = configparser.ConfigParser()
        for section, opts in config_options.items():
            cfg_parser.add_section(section)
            for option, value in opts.items():
                if isinstance(value, list):
                    value = ",".join(value)
                if not isinstance(value, six.string_types):
                    # Python 3 configparser will reject non-string values.
                    value = str(value)
@@ -246,42 +304,55 @@ class KojiWrapper(object):

    def get_live_media_cmd(self, options, wait=True):
        # Usage: koji spin-livemedia [options] <name> <version> <target> <arch> <kickstart-file>
        cmd = self._get_cmd("spin-livemedia")

        for key in ("name", "version", "target", "arch", "ksfile"):
            if key not in options:
                raise ValueError('Expected options to have key "%s"' % key)
            cmd.append(options[key])
        if "install_tree" not in options:
            raise ValueError('Expected options to have key "install_tree"')
        cmd.append("--install-tree=%s" % options["install_tree"])

        for repo in options.get("repo", []):
            cmd.append("--repo=%s" % repo)

        if options.get("scratch"):
            cmd.append("--scratch")

        if options.get("skip_tag"):
            cmd.append("--skip-tag")

        if "ksurl" in options:
            cmd.append("--ksurl=%s" % options["ksurl"])

        if "release" in options:
            cmd.append("--release=%s" % options["release"])

        if "can_fail" in options:
            cmd.append("--can-fail=%s" % ",".join(options["can_fail"]))

        if wait:
            cmd.append("--wait")

        return cmd

    def get_create_image_cmd(
        self,
        name,
        version,
        target,
        arch,
        ks_file,
        repos,
        image_type="live",
        image_format=None,
        release=None,
        wait=True,
        archive=False,
        specfile=None,
        ksurl=None,
    ):
        # Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>
        # Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>
        # Examples:
@@ -327,7 +398,10 @@ class KojiWrapper(object):
                raise ValueError("Format can be specified only for appliance images'")
            supported_formats = ["raw", "qcow", "qcow2", "vmx"]
            if image_format not in supported_formats:
                raise ValueError(
                    "Format is not supported: %s. Supported formats: %s"
                    % (image_format, " ".join(sorted(supported_formats)))
                )
            cmd.append("--format=%s" % image_format)

        if release is not None:
@@ -350,23 +424,27 @@ class KojiWrapper(object):

    def _has_connection_error(self, output):
        """Checks if output indicates connection error."""
        return re.search("error: failed to connect\n$", output)

    def _has_offline_error(self, output):
        """Check if output indicates server offline."""
        return re.search("koji: ServerOffline:", output)

    def _wait_for_task(self, task_id, logfile=None, max_retries=None):
        """Tries to wait for a task to finish. On connection error it will
        retry with `watch-task` command.
        """
        cmd = self._get_cmd("watch-task", str(task_id))
        attempt = 0

        while True:
            retcode, output = run(
                cmd, can_fail=True, logfile=logfile, universal_newlines=True
            )

            if retcode == 0 or not (
                self._has_connection_error(output) or self._has_offline_error(output)
            ):
                # Task finished for reason other than connection error or server offline error.
                return retcode, output

@@ -375,7 +453,9 @@ class KojiWrapper(object):
                break
            time.sleep(attempt * 10)

        raise RuntimeError(
            "Failed to wait for task %s. Too many connection errors." % task_id
        )

    def run_blocking_cmd(self, command, log_file=None, max_retries=None):
        """
@@ -384,17 +464,28 @@ class KojiWrapper(object):
        command finishes.
        """
        with self.get_koji_cmd_env() as env:
            retcode, output = run(
                command,
                can_fail=True,
                logfile=log_file,
                env=env,
                universal_newlines=True,
            )

        match = re.search(r"Created task: (\d+)", output)
        if not match:
            raise RuntimeError(
                "Could not find task ID in output. Command '%s' returned '%s'."
                % (" ".join(command), output)
            )
        task_id = int(match.groups()[0])

        if retcode != 0 and (
            self._has_connection_error(output) or self._has_offline_error(output)
        ):
            retcode, output = self._wait_for_task(
                task_id, logfile=log_file, max_retries=max_retries
            )

        return {
            "retcode": retcode,
@@ -403,7 +494,9 @@ class KojiWrapper(object):
        }

    def watch_task(self, task_id, log_file=None, max_retries=None):
        retcode, _ = self._wait_for_task(
            task_id, logfile=log_file, max_retries=max_retries
        )
        return retcode

    def get_image_paths(self, task_id, callback=None):
@@ -420,26 +513,32 @@ class KojiWrapper(object):
        children_tasks = self.koji_proxy.getTaskChildren(task_id, request=True)

        for child_task in children_tasks:
            if child_task["method"] not in [
                "createImage",
                "createLiveMedia",
                "createAppliance",
            ]:
                continue

            if child_task["state"] != koji.TASK_STATES["CLOSED"]:
                # The subtask is failed, which can happen with the can_fail
                # option. If given, call the callback, and go to next child.
                if callback:
                    callback(child_task["arch"])
                continue

            is_scratch = child_task["request"][-1].get("scratch", False)
            task_result = self.koji_proxy.getTaskResult(child_task["id"])

            if is_scratch:
                topdir = os.path.join(
                    self.koji_module.pathinfo.work(),
                    self.koji_module.pathinfo.taskrelpath(child_task["id"]),
                )
            else:
                build = self.koji_proxy.getImageBuild(
                    "%(name)s-%(version)s-%(release)s" % task_result
                )
                build["name"] = task_result["name"]
                build["version"] = task_result["version"]
                build["release"] = task_result["release"]
@@ -447,7 +546,9 @@ class KojiWrapper(object):
                topdir = self.koji_module.pathinfo.imagebuild(build)

            for i in task_result["files"]:
                result.setdefault(task_result["arch"], []).append(
                    os.path.join(topdir, i)
                )

        return result

@@ -460,7 +561,7 @@ class KojiWrapper(object):
        # scan parent and child tasks for certain methods
        task_info = None
        for i in task_info_list:
            if i["method"] in ("createAppliance", "createLiveCD", "createImage"):
                task_info = i
                break

@@ -469,9 +570,14 @@ class KojiWrapper(object):
        task_result.pop("rpmlist", None)

        if scratch:
            topdir = os.path.join(
                self.koji_module.pathinfo.work(),
                self.koji_module.pathinfo.taskrelpath(task_info["id"]),
            )
        else:
            build = self.koji_proxy.getImageBuild(
                "%(name)s-%(version)s-%(release)s" % task_result
            )
            build["name"] = task_result["name"]
            build["version"] = task_result["version"]
            build["release"] = task_result["release"]
@@ -501,7 +607,10 @@ class KojiWrapper(object):
        task_result = self.koji_proxy.getTaskResult(task_info["id"])

        # Get koji dir with results (rpms, srpms, logs, ...)
        topdir = os.path.join(
            self.koji_module.pathinfo.work(),
            self.koji_module.pathinfo.taskrelpath(task_info["id"]),
        )

        # TODO: Maybe use different approach for non-scratch builds - see get_image_path()

@@ -550,7 +659,10 @@ class KojiWrapper(object):
        for i in result_files:
            rpminfo = self.koji_proxy.getRPM(i)
            build = self.koji_proxy.getBuild(rpminfo["build_id"])
            path = os.path.join(
                self.koji_module.pathinfo.build(build),
                self.koji_module.pathinfo.signed(rpminfo, sigkey),
            )
            result.append(path)

        return result
@@ -559,7 +671,9 @@ class KojiWrapper(object):
        builds = self.koji_proxy.listBuilds(taskID=task_id)
        return [build.get("nvr") for build in builds if build.get("nvr")]

    def multicall_map(
        self, koji_session, koji_session_fnc, list_of_args=None, list_of_kwargs=None
    ):
        """
        Calls the `koji_session_fnc` using Koji multicall feature N times based on the list of
        arguments passed in `list_of_args` and `list_of_kwargs`.
@@ -578,8 +692,10 @@ class KojiWrapper(object):
        if list_of_args is None and list_of_kwargs is None:
            raise ValueError("One of list_of_args or list_of_kwargs must be set.")

        if type(list_of_args) not in [type(None), list] or type(list_of_kwargs) not in [
            type(None),
            list,
        ]:
            raise ValueError("list_of_args and list_of_kwargs must be list or None.")

        if list_of_kwargs is None:
@@ -588,7 +704,9 @@ class KojiWrapper(object):
            list_of_args = [[]] * len(list_of_kwargs)

        if len(list_of_args) != len(list_of_kwargs):
            raise ValueError(
                "Length of list_of_args and list_of_kwargs must be the same."
            )

        koji_session.multicall = True
        for args, kwargs in zip(list_of_args, list_of_kwargs):
@@ -604,8 +722,9 @@ class KojiWrapper(object):
            return None
        if type(responses) != list:
            raise ValueError(
                "Fault element was returned for multicall of method %r: %r"
                % (koji_session_fnc, responses)
            )

        results = []

@@ -619,13 +738,15 @@ class KojiWrapper(object):
            if type(response) == list:
                if not response:
                    raise ValueError(
                        "Empty list returned for multicall of method %r with args %r, %r"
                        % (koji_session_fnc, args, kwargs)
                    )
                results.append(response[0])
            else:
                raise ValueError(
                    "Unexpected data returned for multicall of method %r with args %r, %r: %r"
                    % (koji_session_fnc, args, kwargs, response)
                )

        return results

@@ -645,12 +766,14 @@ def get_buildroot_rpms(compose, task_id):
    result = []
    if task_id:
        # runroot
        koji = KojiWrapper(compose.conf["koji_profile"])
        buildroot_infos = koji.koji_proxy.listBuildroots(taskID=task_id)
        if not buildroot_infos:
            children_tasks = koji.koji_proxy.getTaskChildren(task_id)
            for child_task in children_tasks:
                buildroot_infos = koji.koji_proxy.listBuildroots(
                    taskID=child_task["id"]
                )
                if buildroot_infos:
                    break
        buildroot_info = buildroot_infos[-1]
@@ -660,8 +783,10 @@ def get_buildroot_rpms(compose, task_id):
            result.append(fmt % rpm_info)
    else:
        # local
        retcode, output = run(
            "rpm -qa --qf='%{name}-%{version}-%{release}.%{arch}\n'",
            universal_newlines=True,
        )
        for i in output.splitlines():
            if not i:
                continue
@@ -21,14 +21,29 @@ from ..util import process_args


class LoraxWrapper(object):
    def get_lorax_cmd(
        self,
        product,
        version,
        release,
        repo_baseurl,
        output_dir,
        variant=None,
        bugurl=None,
        nomacboot=False,
        noupgrade=False,
        is_final=False,
        buildarch=None,
        volid=None,
        buildinstallpackages=None,
        add_template=None,
        add_arch_template=None,
        add_template_var=None,
        add_arch_template_var=None,
        rootfs_size=None,
        log_dir=None,
        dracut_args=None,
    ):
        cmd = ["lorax"]
        cmd.append("--product=%s" % product)
        cmd.append("--version=%s" % version)
@@ -60,17 +75,17 @@ class LoraxWrapper(object):
        if volid:
            cmd.append("--volid=%s" % volid)

        cmd.extend(process_args("--installpkgs=%s", buildinstallpackages))
        cmd.extend(process_args("--add-template=%s", add_template))
        cmd.extend(process_args("--add-arch-template=%s", add_arch_template))
        cmd.extend(process_args("--add-template-var=%s", add_template_var))
        cmd.extend(process_args("--add-arch-template-var=%s", add_arch_template_var))

        if log_dir:
            cmd.append("--logfile=%s" % os.path.join(log_dir, "lorax.log"))

        if rootfs_size is not None:
            cmd.append("--rootfs-size=%s" % (rootfs_size))

        for i in force_list(dracut_args or []):
            cmd.append("--dracut-arg=%s" % i)
@@ -82,7 +97,22 @@ class LoraxWrapper(object):

        return cmd

    def get_buildinstall_cmd(
        self,
        product,
        version,
        release,
        repo_baseurl,
        output_dir,
        variant=None,
        bugurl=None,
        nomacboot=False,
        noupgrade=False,
        is_final=False,
        buildarch=None,
        volid=None,
        brand=None,
    ):
        # RHEL 6 compatibility
        # Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root>

|
|||||||
|
|
||||||
UNRESOLVED_DEPENDENCY_RE = re.compile(r"^.*Unresolvable dependency (.+) in ([^ ]+).*$")
|
UNRESOLVED_DEPENDENCY_RE = re.compile(r"^.*Unresolvable dependency (.+) in ([^ ]+).*$")
|
||||||
|
|
||||||
MISSING_COMPS_PACKAGE_RE = re.compile(r"^.*Could not find a match for (.+) in any configured repo")
|
MISSING_COMPS_PACKAGE_RE = re.compile(
|
||||||
|
r"^.*Could not find a match for (.+) in any configured repo"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _write_ks_section(f, section, lines):
|
def _write_ks_section(f, section, lines):
|
||||||
@ -42,12 +44,20 @@ def _write_ks_section(f, section, lines):
|
|||||||
|
|
||||||
|
|
||||||
class PungiWrapper(object):
|
class PungiWrapper(object):
|
||||||
|
def write_kickstart(
|
||||||
def write_kickstart(self, ks_path, repos, groups, packages,
|
self,
|
||||||
exclude_packages=None, comps_repo=None,
|
ks_path,
|
||||||
lookaside_repos=None, fulltree_excludes=None,
|
repos,
|
||||||
multilib_blacklist=None, multilib_whitelist=None,
|
groups,
|
||||||
prepopulate=None):
|
packages,
|
||||||
|
exclude_packages=None,
|
||||||
|
comps_repo=None,
|
||||||
|
lookaside_repos=None,
|
||||||
|
fulltree_excludes=None,
|
||||||
|
multilib_blacklist=None,
|
||||||
|
multilib_whitelist=None,
|
||||||
|
prepopulate=None,
|
||||||
|
):
|
||||||
groups = groups or []
|
groups = groups or []
|
||||||
exclude_packages = exclude_packages or {}
|
exclude_packages = exclude_packages or {}
|
||||||
lookaside_repos = lookaside_repos or {}
|
lookaside_repos = lookaside_repos or {}
|
||||||
@ -95,7 +105,25 @@ class PungiWrapper(object):
|
|||||||
|
|
||||||
kickstart.close()
|
kickstart.close()
|
||||||
|
|
||||||
def get_pungi_cmd(self, config, destdir, name, version=None, flavor=None, selfhosting=False, fulltree=False, greedy=None, nodeps=False, nodownload=True, full_archlist=False, arch=None, cache_dir=None, lookaside_repos=None, multilib_methods=None, profiler=False):
|
def get_pungi_cmd(
|
||||||
|
self,
|
||||||
|
config,
|
||||||
|
destdir,
|
||||||
|
name,
|
||||||
|
version=None,
|
||||||
|
flavor=None,
|
||||||
|
selfhosting=False,
|
||||||
|
fulltree=False,
|
||||||
|
greedy=None,
|
||||||
|
nodeps=False,
|
||||||
|
nodownload=True,
|
||||||
|
full_archlist=False,
|
||||||
|
arch=None,
|
||||||
|
cache_dir=None,
|
||||||
|
lookaside_repos=None,
|
||||||
|
multilib_methods=None,
|
||||||
|
profiler=False,
|
||||||
|
):
|
||||||
cmd = ["pungi"]
|
cmd = ["pungi"]
|
||||||
|
|
||||||
# Gather stage
|
# Gather stage
|
||||||
@ -155,7 +183,25 @@ class PungiWrapper(object):
|
|||||||
|
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
def get_pungi_cmd_dnf(self, config, destdir, name, version=None, flavor=None, selfhosting=False, fulltree=False, greedy=None, nodeps=False, nodownload=True, full_archlist=False, arch=None, cache_dir=None, lookaside_repos=None, multilib_methods=None, profiler=False):
|
def get_pungi_cmd_dnf(
|
||||||
|
self,
|
||||||
|
config,
|
||||||
|
destdir,
|
||||||
|
name,
|
||||||
|
version=None,
|
||||||
|
flavor=None,
|
||||||
|
selfhosting=False,
|
||||||
|
fulltree=False,
|
||||||
|
greedy=None,
|
||||||
|
nodeps=False,
|
||||||
|
nodownload=True,
|
||||||
|
full_archlist=False,
|
||||||
|
arch=None,
|
||||||
|
cache_dir=None,
|
||||||
|
lookaside_repos=None,
|
||||||
|
multilib_methods=None,
|
||||||
|
profiler=False,
|
||||||
|
):
|
||||||
cmd = ["pungi-gather"]
|
cmd = ["pungi-gather"]
|
||||||
|
|
||||||
# path to a kickstart file
|
# path to a kickstart file
|
||||||
@ -223,39 +269,51 @@ class PungiWrapper(object):
|
|||||||
|
|
||||||
return packages, broken_deps, missing_comps
|
return packages, broken_deps, missing_comps
|
||||||
|
|
||||||
def run_pungi(self, ks_file, destdir, name, selfhosting=False, fulltree=False,
|
def run_pungi(
|
||||||
greedy='', cache_dir=None, arch='', multilib_methods=[],
|
self,
|
||||||
nodeps=False, lookaside_repos=[]):
|
ks_file,
|
||||||
|
destdir,
|
||||||
|
name,
|
||||||
|
selfhosting=False,
|
||||||
|
fulltree=False,
|
||||||
|
greedy="",
|
||||||
|
cache_dir=None,
|
||||||
|
arch="",
|
||||||
|
multilib_methods=[],
|
||||||
|
nodeps=False,
|
||||||
|
lookaside_repos=[],
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
This is a replacement for get_pungi_cmd that runs it in-process. Not
|
This is a replacement for get_pungi_cmd that runs it in-process. Not
|
||||||
all arguments are supported.
|
all arguments are supported.
|
||||||
"""
|
"""
|
||||||
from .. import ks, gather, config
|
from .. import ks, gather, config
|
||||||
|
|
||||||
ksparser = ks.get_ksparser(ks_path=ks_file)
|
ksparser = ks.get_ksparser(ks_path=ks_file)
|
||||||
cfg = config.Config()
|
cfg = config.Config()
|
||||||
cfg.set('pungi', 'destdir', destdir)
|
cfg.set("pungi", "destdir", destdir)
|
||||||
cfg.set('pungi', 'family', name)
|
cfg.set("pungi", "family", name)
|
||||||
cfg.set('pungi', 'iso_basename', name)
|
cfg.set("pungi", "iso_basename", name)
|
||||||
cfg.set('pungi', 'fulltree', str(fulltree))
|
cfg.set("pungi", "fulltree", str(fulltree))
|
||||||
cfg.set('pungi', 'selfhosting', str(selfhosting))
|
cfg.set("pungi", "selfhosting", str(selfhosting))
|
||||||
cfg.set('pungi', 'cachedir', cache_dir)
|
cfg.set("pungi", "cachedir", cache_dir)
|
||||||
cfg.set('pungi', 'full_archlist', "True")
|
cfg.set("pungi", "full_archlist", "True")
|
||||||
cfg.set('pungi', 'workdirbase', "%s/work" % destdir)
|
cfg.set("pungi", "workdirbase", "%s/work" % destdir)
|
||||||
cfg.set('pungi', 'greedy', greedy)
|
cfg.set("pungi", "greedy", greedy)
|
||||||
cfg.set('pungi', 'nosource', 'False')
|
cfg.set("pungi", "nosource", "False")
|
||||||
cfg.set('pungi', 'nodebuginfo', 'False')
|
cfg.set("pungi", "nodebuginfo", "False")
|
||||||
cfg.set('pungi', 'force', 'False')
|
cfg.set("pungi", "force", "False")
|
||||||
cfg.set('pungi', 'resolve_deps', str(not nodeps))
|
cfg.set("pungi", "resolve_deps", str(not nodeps))
|
||||||
if arch:
|
if arch:
|
||||||
cfg.set('pungi', 'arch', arch)
|
cfg.set("pungi", "arch", arch)
|
||||||
if multilib_methods:
|
if multilib_methods:
|
||||||
cfg.set('pungi', 'multilib', " ".join(multilib_methods))
|
cfg.set("pungi", "multilib", " ".join(multilib_methods))
|
||||||
if lookaside_repos:
|
if lookaside_repos:
|
||||||
cfg.set('pungi', 'lookaside_repos', " ".join(lookaside_repos))
|
cfg.set("pungi", "lookaside_repos", " ".join(lookaside_repos))
|
||||||
|
|
||||||
mypungi = gather.Pungi(cfg, ksparser)
|
mypungi = gather.Pungi(cfg, ksparser)
|
||||||
|
|
||||||
with open(os.path.join(destdir, 'out'), 'w') as f:
|
with open(os.path.join(destdir, "out"), "w") as f:
|
||||||
with mypungi.yumlock:
|
with mypungi.yumlock:
|
||||||
mypungi._inityum()
|
mypungi._inityum()
|
||||||
mypungi.gather()
|
mypungi.gather()
|
||||||
|
@@ -19,15 +19,23 @@ import os
from kobo.shortcuts import force_list


def get_repoclosure_cmd(backend="yum", arch=None, repos=None, lookaside=None):
    cmds = {
        "yum": {
            "cmd": ["/usr/bin/repoclosure", "--tempcache"],
            "repoarg": "--repoid=%s",
            "lookaside": "--lookaside=%s",
        },
        "dnf": {
            "cmd": ["dnf", "repoclosure"],
            "repoarg": "--repo=%s",
            "lookaside": "--repo=%s",
        },
    }
    try:
        cmd = cmds[backend]["cmd"]
    except KeyError:
        raise RuntimeError("Unknown repoclosure backend: %s" % backend)

    # There are options that are not exposed here, because we don't need
    # them.
@@ -38,17 +46,17 @@ def get_repoclosure_cmd(backend='yum', arch=None, repos=None, lookaside=None):
    repos = repos or {}
    for repo_id, repo_path in repos.items():
        cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
        cmd.append(cmds[backend]["repoarg"] % repo_id)
        if backend == "dnf":
            # For dnf we want to add all repos with the --repo option (which
            # enables only those and not any system repo), and the repos to
            # check are also listed with the --check option.
            cmd.append("--check=%s" % repo_id)

    lookaside = lookaside or {}
    for repo_id, repo_path in lookaside.items():
        cmd.append("--repofrompath=%s,%s" % (repo_id, _to_url(repo_path)))
        cmd.append(cmds[backend]["lookaside"] % repo_id)

    return cmd

@@ -1,4 +1,5 @@
from __future__ import absolute_import

# -*- coding: utf-8 -*-


@@ -25,8 +26,7 @@ from fnmatch import fnmatch

import kobo.log
from kobo.shortcuts import run, force_list
from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
from .kojiwrapper import KojiWrapper


@@ -55,31 +55,34 @@ class ScmBase(kobo.log.LoggingBase):
                universal_newlines=True,
            )
            if retcode != 0:
                self.log_error("Output was: %r" % output)
                raise RuntimeError(
                    "%r failed with exit code %s" % (self.command, retcode)
                )


class FileWrapper(ScmBase):
    def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
        self.log_debug(
            "Exporting directory %s from current working directory..." % (scm_dir)
        )
        if scm_root:
            raise ValueError("FileWrapper: 'scm_root' should be empty.")
        dirs = glob.glob(scm_dir)
        if not dirs:
            raise RuntimeError("No directories matched, can not export.")
        for i in dirs:
            copy_all(i, target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
        if scm_root:
            raise ValueError("FileWrapper: 'scm_root' should be empty.")
        self.log_debug(
            "Exporting file %s from current working directory..." % (scm_file)
        )
        files = glob.glob(scm_file)
        if not files:
            raise RuntimeError("No files matched, can not export.")
        for i in files:
            target_path = os.path.join(target_dir, os.path.basename(i))
            shutil.copy2(i, target_path)
@@ -90,10 +93,24 @@ class CvsWrapper(ScmBase):
        scm_dir = scm_dir.lstrip("/")
        scm_branch = scm_branch or "HEAD"
        with temp_dir() as tmp_dir:
            self.log_debug(
                "Exporting directory %s from CVS %s (branch %s)..."
                % (scm_dir, scm_root, scm_branch)
            )
            self.retry_run(
                [
                    "/usr/bin/cvs",
                    "-q",
                    "-d",
                    scm_root,
                    "export",
                    "-r",
                    scm_branch,
                    scm_dir,
                ],
                workdir=tmp_dir,
                show_cmd=True,
            )
            copy_all(os.path.join(tmp_dir, scm_dir), target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
@@ -101,16 +118,30 @@ class CvsWrapper(ScmBase):
        scm_branch = scm_branch or "HEAD"
        with temp_dir() as tmp_dir:
            target_path = os.path.join(target_dir, os.path.basename(scm_file))
            self.log_debug(
                "Exporting file %s from CVS %s (branch %s)..."
                % (scm_file, scm_root, scm_branch)
            )
            self.retry_run(
                [
                    "/usr/bin/cvs",
                    "-q",
                    "-d",
                    scm_root,
                    "export",
                    "-r",
                    scm_branch,
                    scm_file,
                ],
                workdir=tmp_dir,
                show_cmd=True,
            )

            makedirs(target_dir)
            shutil.copy2(os.path.join(tmp_dir, scm_file), target_path)


class GitWrapper(ScmBase):

    def _clone(self, repo, branch, destdir):
        """Get a single commit from a repository.

@@ -142,8 +173,10 @@ class GitWrapper(ScmBase):
        scm_branch = scm_branch or "master"

        with temp_dir() as tmp_dir:
            self.log_debug(
                "Exporting directory %s from git %s (branch %s)..."
                % (scm_dir, scm_root, scm_branch)
            )

            self._clone(scm_root, scm_branch, tmp_dir)

@@ -156,8 +189,10 @@ class GitWrapper(ScmBase):
        with temp_dir() as tmp_dir:
            target_path = os.path.join(target_dir, os.path.basename(scm_file))

            self.log_debug(
                "Exporting file %s from git %s (branch %s)..."
                % (scm_file, scm_root, scm_branch)
            )

            self._clone(scm_root, scm_branch, tmp_dir)

@@ -175,7 +210,9 @@ class RpmScmWrapper(ScmBase):
        for rpm in self._list_rpms(scm_root):
            scm_dir = scm_dir.lstrip("/")
            with temp_dir() as tmp_dir:
                self.log_debug(
                    "Extracting directory %s from RPM package %s..." % (scm_dir, rpm)
                )
                explode_rpm_package(rpm, tmp_dir)

                makedirs(target_dir)
@@ -183,14 +220,21 @@ class RpmScmWrapper(ScmBase):
                if scm_dir.endswith("/"):
                    copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
                else:
                    run(
                        "cp -a %s %s/"
                        % (
                            shlex_quote(os.path.join(tmp_dir, scm_dir)),
                            shlex_quote(target_dir),
                        )
                    )

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
        for rpm in self._list_rpms(scm_root):
            scm_file = scm_file.lstrip("/")
            with temp_dir() as tmp_dir:
                self.log_debug(
                    "Exporting file %s from RPM file %s..." % (scm_file, rpm)
                )
                explode_rpm_package(rpm, tmp_dir)

                makedirs(target_dir)
@@ -232,9 +276,7 @@ class KojiScmWrapper(ScmBase):
        self._download_build(builds[0], file_pattern, target_dir)

    def _get_from_build(self, build_id, file_pattern, target_dir):
        self.log_debug("Exporting file %s from Koji build %s", file_pattern, build_id)
        build = self.proxy.getBuild(build_id)
        self._download_build(build, file_pattern, target_dir)

@@ -307,7 +349,7 @@ def get_file_from_scm(scm_dict, target_path, compose=None):
        scm_repo = scm_dict["repo"]
        scm_file = scm_dict["file"]
        scm_branch = scm_dict.get("branch", None)
        command = scm_dict.get("command")

    logger = compose._logger if compose else None
    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)
@ -26,8 +26,13 @@ def get_variants_dtd(logger=None):
|
|||||||
"""
|
"""
|
||||||
variants_dtd = "/usr/share/pungi/variants.dtd"
|
variants_dtd = "/usr/share/pungi/variants.dtd"
|
||||||
if not os.path.isfile(variants_dtd):
|
if not os.path.isfile(variants_dtd):
|
||||||
devel_variants_dtd = os.path.normpath(os.path.realpath(
|
devel_variants_dtd = os.path.normpath(
|
||||||
os.path.join(os.path.dirname(__file__), "..", "..", "share", "variants.dtd")))
|
os.path.realpath(
|
||||||
|
os.path.join(
|
||||||
|
os.path.dirname(__file__), "..", "..", "share", "variants.dtd"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
msg = "Variants DTD not found: %s" % variants_dtd
|
msg = "Variants DTD not found: %s" % variants_dtd
|
||||||
if os.path.isfile(devel_variants_dtd):
|
if os.path.isfile(devel_variants_dtd):
|
||||||
if logger:
|
if logger:
|
||||||
@ -57,7 +62,7 @@ NO_WHITESPACE_ELEMENTS = [
|
|||||||
class VariantsXmlParser(object):
|
class VariantsXmlParser(object):
|
||||||
def __init__(self, file_obj, tree_arches=None, tree_variants=None, logger=None):
|
def __init__(self, file_obj, tree_arches=None, tree_variants=None, logger=None):
|
||||||
self.tree = lxml.etree.parse(file_obj)
|
self.tree = lxml.etree.parse(file_obj)
|
||||||
with open(get_variants_dtd(logger), 'r') as f:
|
with open(get_variants_dtd(logger), "r") as f:
|
||||||
self.dtd = lxml.etree.DTD(f)
|
self.dtd = lxml.etree.DTD(f)
|
||||||
self.addons = {}
|
self.addons = {}
|
||||||
self.variants = {}
|
self.variants = {}
|
||||||
@ -111,10 +116,15 @@ class VariantsXmlParser(object):
|
|||||||
"parent": parent,
|
"parent": parent,
|
||||||
}
|
}
|
||||||
if self.tree_arches:
|
if self.tree_arches:
|
||||||
variant_dict["arches"] = [i for i in variant_dict["arches"] if i in self.tree_arches]
|
variant_dict["arches"] = [
|
||||||
|
i for i in variant_dict["arches"] if i in self.tree_arches
|
||||||
|
]
|
||||||
if not variant_dict["arches"]:
|
if not variant_dict["arches"]:
|
||||||
if self.logger:
|
if self.logger:
|
||||||
self.logger.info('Excluding variant %s: all its arches are filtered.' % variant_dict['id'])
|
self.logger.info(
|
||||||
|
"Excluding variant %s: all its arches are filtered."
|
||||||
|
% variant_dict["id"]
|
||||||
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
for grouplist_node in variant_node.xpath("groups"):
|
for grouplist_node in variant_node.xpath("groups"):
|
||||||
@ -141,7 +151,7 @@ class VariantsXmlParser(object):
|
|||||||
for module_node in modulelist_node.xpath("module"):
|
for module_node in modulelist_node.xpath("module"):
|
||||||
module = {
|
module = {
|
||||||
"name": str(module_node.text),
|
"name": str(module_node.text),
|
||||||
"glob": self._is_true(module_node.attrib.get("glob", "false"))
|
"glob": self._is_true(module_node.attrib.get("glob", "false")),
|
||||||
}
|
}
|
||||||
|
|
||||||
variant_dict["modules"].append(module)
|
variant_dict["modules"].append(module)
|
||||||
@ -151,7 +161,9 @@ class VariantsXmlParser(object):
|
|||||||
"name": str(kojitag_node.text),
|
"name": str(kojitag_node.text),
|
||||||
}
|
}
|
||||||
|
|
||||||
variant_dict["modular_koji_tags"] = variant_dict["modular_koji_tags"] or []
|
variant_dict["modular_koji_tags"] = (
|
||||||
|
variant_dict["modular_koji_tags"] or []
|
||||||
|
)
|
||||||
variant_dict["modular_koji_tags"].append(kojitag)
|
variant_dict["modular_koji_tags"].append(kojitag)
|
||||||
|
|
||||||
for environments_node in variant_node.xpath("environments"):
|
for environments_node in variant_node.xpath("environments"):
|
||||||
@ -188,28 +200,37 @@ class VariantsXmlParser(object):
|
|||||||
|
|
||||||
has_optional = self._is_true(variant_node.attrib.get("has_optional", "false"))
|
has_optional = self._is_true(variant_node.attrib.get("has_optional", "false"))
|
||||||
if has_optional and not contains_optional:
|
if has_optional and not contains_optional:
|
||||||
optional = Variant(id="optional", name="optional", type="optional",
|
optional = Variant(
|
||||||
arches=variant.arches, groups=[], parent=variant)
|
id="optional",
|
||||||
|
name="optional",
|
||||||
|
type="optional",
|
||||||
|
arches=variant.arches,
|
||||||
|
groups=[],
|
||||||
|
parent=variant,
|
||||||
|
)
|
||||||
self.add_child(optional, variant)
|
self.add_child(optional, variant)
|
||||||
|
|
||||||
for ref in variant_node.xpath("variants/ref/@id"):
|
for ref in variant_node.xpath("variants/ref/@id"):
|
||||||
try:
|
try:
|
||||||
child_variant = self.parse_variant_node(self.addons[ref], variant)
|
child_variant = self.parse_variant_node(self.addons[ref], variant)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise RuntimeError("Variant %s references non-existing variant %s"
|
raise RuntimeError(
|
||||||
% (variant.uid, ref))
|
"Variant %s references non-existing variant %s" % (variant.uid, ref)
|
||||||
|
)
|
||||||
self.add_child(child_variant, variant)
|
self.add_child(child_variant, variant)
|
||||||
|
|
||||||
# XXX: top-level optional
|
# XXX: top-level optional
|
||||||
# for ref in variant_node.xpath("variants/ref/@id"):
|
# for ref in variant_node.xpath("variants/ref/@id"):
|
||||||
# variant["variants"].append(copy.deepcopy(addons[ref]))
|
# variant["variants"].append(copy.deepcopy(addons[ref]))
|
||||||
|
|
||||||
return variant
|
return variant
|
||||||
|
|
||||||
def _is_excluded(self, variant):
|
def _is_excluded(self, variant):
|
||||||
if self.tree_variants and variant.uid not in self.tree_variants:
|
if self.tree_variants and variant.uid not in self.tree_variants:
|
||||||
if self.logger:
|
if self.logger:
|
||||||
self.logger.info('Excluding variant %s: filtered by configuration.' % variant)
|
self.logger.info(
|
||||||
|
"Excluding variant %s: filtered by configuration." % variant
|
||||||
|
)
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -225,7 +246,9 @@ class VariantsXmlParser(object):
|
|||||||
variant_id = str(variant_node.attrib["id"])
|
variant_id = str(variant_node.attrib["id"])
|
||||||
self.addons[variant_id] = variant_node
|
self.addons[variant_id] = variant_node
|
||||||
|
|
||||||
for variant_node in self.tree.xpath("/variants/variant[@type='layered-product']"):
|
for variant_node in self.tree.xpath(
|
||||||
|
"/variants/variant[@type='layered-product']"
|
||||||
|
):
|
||||||
variant_id = str(variant_node.attrib["id"])
|
variant_id = str(variant_node.attrib["id"])
|
||||||
self.addons[variant_id] = variant_node
|
self.addons[variant_id] = variant_node
|
||||||
|
|
||||||
@ -239,9 +262,20 @@ class VariantsXmlParser(object):
|
|||||||
|
|
||||||
|
|
||||||
class Variant(object):
|
class Variant(object):
|
||||||
def __init__(self, id, name, type, arches, groups, environments=None,
|
def __init__(
|
||||||
buildinstallpackages=None, is_empty=False, parent=None,
|
self,
|
||||||
modules=None, modular_koji_tags=None):
|
id,
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
arches,
|
||||||
|
groups,
|
||||||
|
environments=None,
|
||||||
|
buildinstallpackages=None,
|
||||||
|
is_empty=False,
|
||||||
|
parent=None,
|
||||||
|
modules=None,
|
||||||
|
modular_koji_tags=None,
|
||||||
|
):
|
||||||
|
|
||||||
environments = environments or []
|
environments = environments or []
|
||||||
buildinstallpackages = buildinstallpackages or []
|
buildinstallpackages = buildinstallpackages or []
|
||||||
@ -257,7 +291,9 @@ class Variant(object):
|
|||||||
self.modules = sorted(self.modules, key=lambda x: x["name"])
|
self.modules = sorted(self.modules, key=lambda x: x["name"])
|
||||||
self.modular_koji_tags = copy.deepcopy(modular_koji_tags)
|
self.modular_koji_tags = copy.deepcopy(modular_koji_tags)
|
||||||
if self.modular_koji_tags:
|
if self.modular_koji_tags:
|
||||||
self.modular_koji_tags = sorted(self.modular_koji_tags, key=lambda x: x["name"])
|
self.modular_koji_tags = sorted(
|
||||||
|
self.modular_koji_tags, key=lambda x: x["name"]
|
||||||
|
)
|
||||||
self.buildinstallpackages = sorted(buildinstallpackages)
|
self.buildinstallpackages = sorted(buildinstallpackages)
|
||||||
self.variants = {}
|
self.variants = {}
|
||||||
self.parent = parent
|
self.parent = parent
|
||||||
@ -275,7 +311,9 @@ class Variant(object):
|
|||||||
return self.uid
|
return self.uid
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format(self)
|
return 'Variant(id="{0.id}", name="{0.name}", type="{0.type}", parent={0.parent})'.format(
|
||||||
|
self
|
||||||
|
)
|
||||||
|
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
return self.type == other.type and self.uid == other.uid
|
return self.type == other.type and self.uid == other.uid
|
||||||
@ -284,7 +322,7 @@ class Variant(object):
|
|||||||
return not (self == other)
|
return not (self == other)
|
||||||
|
|
||||||
def __lt__(self, other):
|
def __lt__(self, other):
|
||||||
ORDERING = {'variant': 0, 'addon': 1, 'layered-product': 1, 'optional': 2}
|
ORDERING = {"variant": 0, "addon": 1, "layered-product": 1, "optional": 2}
|
||||||
return (ORDERING[self.type], self.uid) < (ORDERING[other.type], other.uid)
|
return (ORDERING[self.type], self.uid) < (ORDERING[other.type], other.uid)
|
||||||
|
|
||||||
def __le__(self, other):
|
def __le__(self, other):
|
||||||
@ -313,11 +351,17 @@ class Variant(object):
|
|||||||
raise RuntimeError("Only 'variant' can contain another variants.")
|
raise RuntimeError("Only 'variant' can contain another variants.")
|
||||||
if variant.id == self.id:
|
if variant.id == self.id:
|
||||||
# due to os/<variant.id> path -- addon id would conflict with parent variant id
|
# due to os/<variant.id> path -- addon id would conflict with parent variant id
|
||||||
raise RuntimeError("Child variant id must be different than parent variant id: %s" % variant.id)
|
raise RuntimeError(
|
||||||
|
"Child variant id must be different than parent variant id: %s"
|
||||||
|
% variant.id
|
||||||
|
)
|
||||||
# sometimes an addon or layered product can be part of multiple variants with different set of arches
|
# sometimes an addon or layered product can be part of multiple variants with different set of arches
|
||||||
arches = sorted(set(self.arches).intersection(set(variant.arches)))
|
arches = sorted(set(self.arches).intersection(set(variant.arches)))
|
||||||
if self.arches and not arches:
|
if self.arches and not arches:
|
||||||
raise RuntimeError("%s: arch list %s does not intersect with parent arch list: %s" % (variant, variant.arches, self.arches))
|
raise RuntimeError(
|
||||||
|
"%s: arch list %s does not intersect with parent arch list: %s"
|
||||||
|
% (variant, variant.arches, self.arches)
|
||||||
|
)
|
||||||
variant.arches = arches
|
variant.arches = arches
|
||||||
self.variants[variant.id] = variant
|
self.variants[variant.id] = variant
|
||||||
variant.parent = self
|
variant.parent = self
|
||||||
@ -327,11 +371,12 @@ class Variant(object):
|
|||||||
|
|
||||||
types = types or ["self"]
|
types = types or ["self"]
|
||||||
result = copy.deepcopy(self.groups)
|
result = copy.deepcopy(self.groups)
|
||||||
for variant in self.get_variants(arch=arch, types=types,
|
for variant in self.get_variants(arch=arch, types=types, recursive=recursive):
|
||||||
recursive=recursive):
|
|
||||||
if variant == self:
|
if variant == self:
|
||||||
continue
|
continue
|
||||||
for group in variant.get_groups(arch=arch, types=types, recursive=recursive):
|
for group in variant.get_groups(
|
||||||
|
arch=arch, types=types, recursive=recursive
|
||||||
|
):
|
||||||
if group not in result:
|
if group not in result:
|
||||||
result.append(group)
|
result.append(group)
|
||||||
return result
|
return result
|
||||||
@ -344,12 +389,12 @@ class Variant(object):
|
|||||||
|
|
||||||
types = types or ["self"]
|
types = types or ["self"]
|
||||||
result = copy.deepcopy(self.modules)
|
result = copy.deepcopy(self.modules)
|
||||||
for variant in self.get_variants(arch=arch, types=types,
|
for variant in self.get_variants(arch=arch, types=types, recursive=recursive):
|
||||||
recursive=recursive):
|
|
||||||
if variant == self:
|
if variant == self:
|
||||||
continue
|
continue
|
||||||
for module in variant.get_modules(arch=arch, types=types,
|
for module in variant.get_modules(
|
||||||
recursive=recursive):
|
arch=arch, types=types, recursive=recursive
|
||||||
|
):
|
||||||
if module not in result:
|
if module not in result:
|
||||||
result.append(module)
|
result.append(module)
|
||||||
return result
|
return result
|
||||||
@ -362,12 +407,12 @@ class Variant(object):
|
|||||||
|
|
||||||
types = types or ["self"]
|
types = types or ["self"]
|
||||||
result = copy.deepcopy(self.modular_koji_tags)
|
result = copy.deepcopy(self.modular_koji_tags)
|
||||||
for variant in self.get_variants(arch=arch, types=types,
|
for variant in self.get_variants(arch=arch, types=types, recursive=recursive):
|
||||||
recursive=recursive):
|
|
||||||
if variant == self:
|
if variant == self:
|
||||||
continue
|
continue
|
||||||
for koji_tag in variant.get_modular_koji_tags(
|
for koji_tag in variant.get_modular_koji_tags(
|
||||||
arch=arch, types=types, recursive=recursive):
|
arch=arch, types=types, recursive=recursive
|
||||||
|
):
|
||||||
if koji_tag not in result:
|
if koji_tag not in result:
|
||||||
result.append(koji_tag)
|
result.append(koji_tag)
|
||||||
return result
|
return result
|
||||||
@ -398,7 +443,11 @@ class Variant(object):
|
|||||||
continue
|
continue
|
||||||
result.append(variant)
|
result.append(variant)
|
||||||
if recursive:
|
if recursive:
|
||||||
result.extend(variant.get_variants(types=[i for i in types if i != "self"], recursive=True))
|
result.extend(
|
||||||
|
variant.get_variants(
|
||||||
|
types=[i for i in types if i != "self"], recursive=True
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
@ -505,12 +505,14 @@ def get_compose_data(compose_path):
|
|||||||
"release_is_layered": compose.info.release.is_layered,
|
"release_is_layered": compose.info.release.is_layered,
|
||||||
}
|
}
|
||||||
if compose.info.release.is_layered:
|
if compose.info.release.is_layered:
|
||||||
data.update({
|
data.update(
|
||||||
|
{
|
||||||
"base_product_name": compose.info.base_product.name,
|
"base_product_name": compose.info.base_product.name,
|
||||||
"base_product_short": compose.info.base_product.short,
|
"base_product_short": compose.info.base_product.short,
|
||||||
"base_product_version": compose.info.base_product.version,
|
"base_product_version": compose.info.base_product.version,
|
||||||
"base_product_type": compose.info.base_product.type,
|
"base_product_type": compose.info.base_product.type,
|
||||||
})
|
}
|
||||||
|
)
|
||||||
return data
|
return data
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
return {}
|
return {}
|
||||||
@ -549,6 +551,7 @@ def send_notification(compose_dir, command, parts):
|
|||||||
if not command:
|
if not command:
|
||||||
return
|
return
|
||||||
from pungi.notifier import PungiNotifier
|
from pungi.notifier import PungiNotifier
|
||||||
|
|
||||||
data = get_compose_data(compose_dir)
|
data = get_compose_data(compose_dir)
|
||||||
data["location"] = try_translate_path(parts, compose_dir)
|
data["location"] = try_translate_path(parts, compose_dir)
|
||||||
notifier = PungiNotifier([command])
|
notifier = PungiNotifier([command])
|
||||||
|
@ -24,60 +24,62 @@ from pungi.wrappers import iso
|
|||||||
|
|
||||||
|
|
||||||
def sh(log, cmd, *args, **kwargs):
|
def sh(log, cmd, *args, **kwargs):
|
||||||
log.info('Running: %s', ' '.join(shlex_quote(x) for x in cmd))
|
log.info("Running: %s", " ".join(shlex_quote(x) for x in cmd))
|
||||||
ret, out = shortcuts.run(cmd, *args, universal_newlines=True, **kwargs)
|
ret, out = shortcuts.run(cmd, *args, universal_newlines=True, **kwargs)
|
||||||
if out:
|
if out:
|
||||||
log.debug('%s', out)
|
log.debug("%s", out)
|
||||||
return ret, out
|
return ret, out
|
||||||
|
|
||||||
|
|
||||||
def get_lorax_dir(default='/usr/share/lorax'):
|
def get_lorax_dir(default="/usr/share/lorax"):
|
||||||
try:
|
try:
|
||||||
_, out = shortcuts.run(['python3', '-c' 'import pylorax; print(pylorax.find_templates())'],
|
_, out = shortcuts.run(
|
||||||
universal_newlines=True)
|
["python3", "-c" "import pylorax; print(pylorax.find_templates())"],
|
||||||
|
universal_newlines=True,
|
||||||
|
)
|
||||||
return out.strip()
|
return out.strip()
|
||||||
except Exception:
|
except Exception:
|
||||||
return default
|
return default
|
||||||
|
|
||||||
|
|
||||||
def as_bool(arg):
|
def as_bool(arg):
|
||||||
if arg == 'true':
|
if arg == "true":
|
||||||
return True
|
return True
|
||||||
elif arg == 'false':
|
elif arg == "false":
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
return arg
|
return arg
|
||||||
|
|
||||||
|
|
||||||
def get_arch(log, iso_dir):
|
def get_arch(log, iso_dir):
|
||||||
di_path = os.path.join(iso_dir, '.discinfo')
|
di_path = os.path.join(iso_dir, ".discinfo")
|
||||||
if os.path.exists(di_path):
|
if os.path.exists(di_path):
|
||||||
di = productmd.discinfo.DiscInfo()
|
di = productmd.discinfo.DiscInfo()
|
||||||
di.load(di_path)
|
di.load(di_path)
|
||||||
log.info('Detected bootable ISO for %s (based on .discinfo)', di.arch)
|
log.info("Detected bootable ISO for %s (based on .discinfo)", di.arch)
|
||||||
return di.arch
|
return di.arch
|
||||||
|
|
||||||
ti_path = os.path.join(iso_dir, '.treeinfo')
|
ti_path = os.path.join(iso_dir, ".treeinfo")
|
||||||
if os.path.exists(ti_path):
|
if os.path.exists(ti_path):
|
||||||
ti = productmd.treeinfo.TreeInfo()
|
ti = productmd.treeinfo.TreeInfo()
|
||||||
ti.load(ti_path)
|
ti.load(ti_path)
|
||||||
log.info('Detected bootable ISO for %s (based on .treeinfo)', ti.tree.arch)
|
log.info("Detected bootable ISO for %s (based on .treeinfo)", ti.tree.arch)
|
||||||
return ti.tree.arch
|
return ti.tree.arch
|
||||||
|
|
||||||
# There is no way to tell the architecture of an ISO file without guessing.
|
# There is no way to tell the architecture of an ISO file without guessing.
|
||||||
# Let's print a warning and continue with assuming unbootable ISO.
|
# Let's print a warning and continue with assuming unbootable ISO.
|
||||||
|
|
||||||
log.warning('Failed to detect arch for ISO, assuming unbootable one.')
|
log.warning("Failed to detect arch for ISO, assuming unbootable one.")
|
||||||
log.warning('If this is incorrect, use the --force-arch option.')
|
log.warning("If this is incorrect, use the --force-arch option.")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def run(log, opts):
|
def run(log, opts):
|
||||||
# mount source iso
|
# mount source iso
|
||||||
log.info('Mounting %s', opts.source)
|
log.info("Mounting %s", opts.source)
|
||||||
target = os.path.abspath(opts.target)
|
target = os.path.abspath(opts.target)
|
||||||
|
|
||||||
with util.temp_dir(prefix='patch-iso-') as work_dir:
|
with util.temp_dir(prefix="patch-iso-") as work_dir:
|
||||||
with iso.mount(opts.source) as source_iso_dir:
|
with iso.mount(opts.source) as source_iso_dir:
|
||||||
util.copy_all(source_iso_dir, work_dir)
|
util.copy_all(source_iso_dir, work_dir)
|
||||||
|
|
||||||
@ -94,29 +96,34 @@ def run(log, opts):
|
|||||||
# create graft points from mounted source iso + overlay dir
|
# create graft points from mounted source iso + overlay dir
|
||||||
graft_points = iso.get_graft_points([work_dir] + opts.dirs)
|
graft_points = iso.get_graft_points([work_dir] + opts.dirs)
|
||||||
# if ks.cfg is detected, patch syslinux + grub to use it
|
# if ks.cfg is detected, patch syslinux + grub to use it
|
||||||
if 'ks.cfg' in graft_points:
|
if "ks.cfg" in graft_points:
|
||||||
log.info('Adding ks.cfg to boot configs')
|
log.info("Adding ks.cfg to boot configs")
|
||||||
tweak_configs(work_dir, volume_id, graft_points['ks.cfg'], logger=log)
|
tweak_configs(work_dir, volume_id, graft_points["ks.cfg"], logger=log)
|
||||||
|
|
||||||
arch = opts.force_arch or get_arch(log, work_dir)
|
arch = opts.force_arch or get_arch(log, work_dir)
|
||||||
|
|
||||||
with tempfile.NamedTemporaryFile(prefix='graft-points-') as graft_file:
|
with tempfile.NamedTemporaryFile(prefix="graft-points-") as graft_file:
|
||||||
iso.write_graft_points(graft_file.name, graft_points,
|
iso.write_graft_points(
|
||||||
exclude=["*/TRANS.TBL", "*/boot.cat"])
|
graft_file.name, graft_points, exclude=["*/TRANS.TBL", "*/boot.cat"]
|
||||||
|
)
|
||||||
|
|
||||||
# make the target iso bootable if source iso is bootable
|
# make the target iso bootable if source iso is bootable
|
||||||
boot_args = input_charset = None
|
boot_args = input_charset = None
|
||||||
if arch:
|
if arch:
|
||||||
boot_args = iso.get_boot_options(
|
boot_args = iso.get_boot_options(
|
||||||
arch, os.path.join(get_lorax_dir(), 'config_files/ppc'))
|
arch, os.path.join(get_lorax_dir(), "config_files/ppc")
|
||||||
input_charset = 'utf-8' if 'ppc' not in arch else None
|
)
|
||||||
|
input_charset = "utf-8" if "ppc" not in arch else None
|
||||||
# Create the target ISO
|
# Create the target ISO
|
||||||
mkisofs_cmd = iso.get_mkisofs_cmd(target, None,
|
mkisofs_cmd = iso.get_mkisofs_cmd(
|
||||||
|
target,
|
||||||
|
None,
|
||||||
volid=volume_id,
|
volid=volume_id,
|
||||||
exclude=["./lost+found"],
|
exclude=["./lost+found"],
|
||||||
graft_points=graft_file.name,
|
graft_points=graft_file.name,
|
||||||
input_charset=input_charset,
|
input_charset=input_charset,
|
||||||
boot_args=boot_args)
|
boot_args=boot_args,
|
||||||
|
)
|
||||||
sh(log, mkisofs_cmd, workdir=work_dir)
|
sh(log, mkisofs_cmd, workdir=work_dir)
|
||||||
|
|
||||||
# isohybrid support
|
# isohybrid support
|
||||||
@ -124,7 +131,9 @@ def run(log, opts):
|
|||||||
isohybrid_cmd = iso.get_isohybrid_cmd(target, arch)
|
isohybrid_cmd = iso.get_isohybrid_cmd(target, arch)
|
||||||
sh(log, isohybrid_cmd)
|
sh(log, isohybrid_cmd)
|
||||||
|
|
||||||
supported = as_bool(opts.supported or iso.get_checkisomd5_data(opts.source)['Supported ISO'])
|
supported = as_bool(
|
||||||
|
opts.supported or iso.get_checkisomd5_data(opts.source)["Supported ISO"]
|
||||||
|
)
|
||||||
# implantmd5 + supported bit (use the same as on source iso, unless
|
# implantmd5 + supported bit (use the same as on source iso, unless
|
||||||
# overriden by --supported option)
|
# overriden by --supported option)
|
||||||
isomd5sum_cmd = iso.get_implantisomd5_cmd(target, supported)
|
isomd5sum_cmd = iso.get_implantisomd5_cmd(target, supported)
|
||||||
|
@ -47,13 +47,19 @@ def ti_merge(one, two):
|
|||||||
var.uid = variant.uid
|
var.uid = variant.uid
|
||||||
var.name = variant.name
|
var.name = variant.name
|
||||||
var.type = variant.type
|
var.type = variant.type
|
||||||
for i in ("debug_packages", "debug_repository", "packages", "repository",
|
for i in (
|
||||||
"source_packages", "source_repository"):
|
"debug_packages",
|
||||||
|
"debug_repository",
|
||||||
|
"packages",
|
||||||
|
"repository",
|
||||||
|
"source_packages",
|
||||||
|
"source_repository",
|
||||||
|
):
|
||||||
setattr(var, i, getattr(variant, i, None))
|
setattr(var, i, getattr(variant, i, None))
|
||||||
one.variants.add(var)
|
one.variants.add(var)
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_CHECKSUMS = ['md5', 'sha1', 'sha256']
|
DEFAULT_CHECKSUMS = ["md5", "sha1", "sha256"]
|
||||||
|
|
||||||
|
|
||||||
class UnifiedISO(object):
|
class UnifiedISO(object):
|
||||||
@ -93,8 +99,8 @@ class UnifiedISO(object):
|
|||||||
shutil.rmtree(self.temp_dir)
|
shutil.rmtree(self.temp_dir)
|
||||||
|
|
||||||
def dump_manifest(self):
|
def dump_manifest(self):
|
||||||
dest = os.path.join(self.compose_path, 'metadata', 'images.json')
|
dest = os.path.join(self.compose_path, "metadata", "images.json")
|
||||||
tmp_file = dest + '.tmp'
|
tmp_file = dest + ".tmp"
|
||||||
try:
|
try:
|
||||||
self.get_image_manifest().dump(tmp_file)
|
self.get_image_manifest().dump(tmp_file)
|
||||||
except Exception:
|
except Exception:
|
||||||
@ -106,7 +112,13 @@ class UnifiedISO(object):
|
|||||||
os.rename(tmp_file, dest)
|
os.rename(tmp_file, dest)
|
||||||
|
|
||||||
def _link_tree(self, dir, variant, arch):
|
def _link_tree(self, dir, variant, arch):
|
||||||
blacklist_files = [".treeinfo", ".discinfo", "boot.iso", "media.repo", "extra_files.json"]
|
blacklist_files = [
|
||||||
|
".treeinfo",
|
||||||
|
".discinfo",
|
||||||
|
"boot.iso",
|
||||||
|
"media.repo",
|
||||||
|
"extra_files.json",
|
||||||
|
]
|
||||||
blacklist_dirs = ["repodata"]
|
blacklist_dirs = ["repodata"]
|
||||||
|
|
||||||
for root, dirs, files in os.walk(dir):
|
for root, dirs, files in os.walk(dir):
|
||||||
@ -120,8 +132,12 @@ class UnifiedISO(object):
|
|||||||
|
|
||||||
old_path = os.path.join(root, fn)
|
old_path = os.path.join(root, fn)
|
||||||
if fn.endswith(".rpm"):
|
if fn.endswith(".rpm"):
|
||||||
new_path = os.path.join(self.temp_dir, "trees", arch, variant.uid, fn)
|
new_path = os.path.join(
|
||||||
self.repos.setdefault(arch, {})[variant.uid] = os.path.dirname(new_path)
|
self.temp_dir, "trees", arch, variant.uid, fn
|
||||||
|
)
|
||||||
|
self.repos.setdefault(arch, {})[variant.uid] = os.path.dirname(
|
||||||
|
new_path
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
old_relpath = os.path.relpath(old_path, dir)
|
old_relpath = os.path.relpath(old_path, dir)
|
||||||
new_path = os.path.join(self.temp_dir, "trees", arch, old_relpath)
|
new_path = os.path.join(self.temp_dir, "trees", arch, old_relpath)
|
||||||
@ -130,8 +146,11 @@ class UnifiedISO(object):
|
|||||||
try:
|
try:
|
||||||
self.linker.link(old_path, new_path)
|
self.linker.link(old_path, new_path)
|
||||||
except OSError as exc:
|
except OSError as exc:
|
||||||
print("Failed to link %s to %s: %s" % (old_path, new_path, exc.strerror),
|
print(
|
||||||
file=sys.stderr)
|
"Failed to link %s to %s: %s"
|
||||||
|
% (old_path, new_path, exc.strerror),
|
||||||
|
file=sys.stderr,
|
||||||
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def link_to_temp(self):
|
def link_to_temp(self):
|
||||||
@ -140,7 +159,9 @@ class UnifiedISO(object):
|
|||||||
for arch in variant.arches:
|
for arch in variant.arches:
|
||||||
print("Processing: {0}.{1}".format(variant.uid, arch))
|
print("Processing: {0}.{1}".format(variant.uid, arch))
|
||||||
try:
|
try:
|
||||||
tree_dir = os.path.join(self.compose_path, variant.paths.os_tree[arch])
|
tree_dir = os.path.join(
|
||||||
|
self.compose_path, variant.paths.os_tree[arch]
|
||||||
|
)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
# The path in metadata is missing: no content there
|
# The path in metadata is missing: no content there
|
||||||
continue
|
continue
|
||||||
@ -151,9 +172,11 @@ class UnifiedISO(object):
|
|||||||
except IOError as exc:
|
except IOError as exc:
|
||||||
if exc.errno != errno.ENOENT:
|
if exc.errno != errno.ENOENT:
|
||||||
raise
|
raise
|
||||||
print('Tree %s.%s has no .treeinfo, skipping...'
|
print(
|
||||||
|
"Tree %s.%s has no .treeinfo, skipping..."
|
||||||
% (variant.uid, arch),
|
% (variant.uid, arch),
|
||||||
file=sys.stderr)
|
file=sys.stderr,
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
arch_ti = self.treeinfo.get(arch)
|
arch_ti = self.treeinfo.get(arch)
|
||||||
@ -164,27 +187,38 @@ class UnifiedISO(object):
|
|||||||
ti_merge(arch_ti, ti)
|
ti_merge(arch_ti, ti)
|
||||||
|
|
||||||
if arch_ti.tree.arch != arch:
|
if arch_ti.tree.arch != arch:
|
||||||
raise RuntimeError('Treeinfo arch mismatch')
|
raise RuntimeError("Treeinfo arch mismatch")
|
||||||
|
|
||||||
# override paths
|
# override paths
|
||||||
arch_ti[variant.uid].repository = variant.uid
|
arch_ti[variant.uid].repository = variant.uid
|
||||||
arch_ti[variant.uid].packages = variant.uid
|
arch_ti[variant.uid].packages = variant.uid
|
||||||
|
|
||||||
comps_path = glob.glob(os.path.join(self.compose_path,
|
comps_path = glob.glob(
|
||||||
|
os.path.join(
|
||||||
|
self.compose_path,
|
||||||
variant.paths.repository[arch],
|
variant.paths.repository[arch],
|
||||||
"repodata", "*comps*.xml"))
|
"repodata",
|
||||||
|
"*comps*.xml",
|
||||||
|
)
|
||||||
|
)
|
||||||
if comps_path:
|
if comps_path:
|
||||||
self.comps.setdefault(arch, {})[variant.uid] = comps_path[0]
|
self.comps.setdefault(arch, {})[variant.uid] = comps_path[0]
|
||||||
|
|
||||||
productid_path = os.path.join(self.compose_path, variant.paths.repository[arch],
|
productid_path = os.path.join(
|
||||||
"repodata", "productid")
|
self.compose_path,
|
||||||
|
variant.paths.repository[arch],
|
||||||
|
"repodata",
|
||||||
|
"productid",
|
||||||
|
)
|
||||||
self.productid.setdefault(arch, {})[variant.uid] = productid_path
|
self.productid.setdefault(arch, {})[variant.uid] = productid_path
|
||||||
|
|
||||||
self._link_tree(tree_dir, variant, arch)
|
self._link_tree(tree_dir, variant, arch)
|
||||||
|
|
||||||
# sources
|
# sources
|
||||||
print("Processing: {0}.{1}".format(variant.uid, "src"))
|
print("Processing: {0}.{1}".format(variant.uid, "src"))
|
||||||
tree_dir = os.path.join(self.compose_path, variant.paths.source_tree[arch])
|
tree_dir = os.path.join(
|
||||||
|
self.compose_path, variant.paths.source_tree[arch]
|
||||||
|
)
|
||||||
ti = productmd.treeinfo.TreeInfo()
|
ti = productmd.treeinfo.TreeInfo()
|
||||||
ti.load(os.path.join(tree_dir, ".treeinfo"))
|
ti.load(os.path.join(tree_dir, ".treeinfo"))
|
||||||
|
|
||||||
@ -196,7 +230,7 @@ class UnifiedISO(object):
|
|||||||
ti_merge(arch_ti, ti)
|
ti_merge(arch_ti, ti)
|
||||||
|
|
||||||
if arch_ti.tree.arch != "src":
|
if arch_ti.tree.arch != "src":
|
||||||
raise RuntimeError('Treeinfo arch mismatch')
|
raise RuntimeError("Treeinfo arch mismatch")
|
||||||
|
|
||||||
# override paths
|
# override paths
|
||||||
arch_ti[variant.uid].repository = variant.uid
|
arch_ti[variant.uid].repository = variant.uid
|
||||||
@ -205,13 +239,15 @@ class UnifiedISO(object):
|
|||||||
# arch_ti[variant.uid].source_repository = variant.uid
|
# arch_ti[variant.uid].source_repository = variant.uid
|
||||||
# arch_ti[variant.uid].source_packages = variant.uid
|
# arch_ti[variant.uid].source_packages = variant.uid
|
||||||
|
|
||||||
self._link_tree(tree_dir, variant, 'src')
|
self._link_tree(tree_dir, variant, "src")
|
||||||
|
|
||||||
# Debuginfo
|
# Debuginfo
|
||||||
print("Processing: {0}.{1} debuginfo".format(variant.uid, arch))
|
print("Processing: {0}.{1} debuginfo".format(variant.uid, arch))
|
||||||
tree_dir = os.path.join(self.compose_path, variant.paths.debug_tree[arch])
|
tree_dir = os.path.join(
|
||||||
|
self.compose_path, variant.paths.debug_tree[arch]
|
||||||
|
)
|
||||||
|
|
||||||
debug_arch = 'debug-%s' % arch
|
debug_arch = "debug-%s" % arch
|
||||||
|
|
||||||
# We don't have a .treeinfo for debuginfo trees. Let's just
|
# We don't have a .treeinfo for debuginfo trees. Let's just
|
||||||
# copy the one from binary tree.
|
# copy the one from binary tree.
|
||||||
@ -236,7 +272,9 @@ class UnifiedISO(object):
|
|||||||
tree_dir = os.path.join(self.temp_dir, "trees", arch)
|
tree_dir = os.path.join(self.temp_dir, "trees", arch)
|
||||||
repo_path = self.repos[arch][variant]
|
repo_path = self.repos[arch][variant]
|
||||||
comps_path = self.comps.get(arch, {}).get(variant, None)
|
comps_path = self.comps.get(arch, {}).get(variant, None)
|
||||||
cmd = cr.get_createrepo_cmd(repo_path, groupfile=comps_path, update=True)
|
cmd = cr.get_createrepo_cmd(
|
||||||
|
repo_path, groupfile=comps_path, update=True
|
||||||
|
)
|
||||||
run(cmd, show_cmd=True)
|
run(cmd, show_cmd=True)
|
||||||
|
|
||||||
productid_path = self.productid.get(arch, {}).get(variant, None)
|
productid_path = self.productid.get(arch, {}).get(variant, None)
|
||||||
@ -247,15 +285,27 @@ class UnifiedISO(object):
|
|||||||
|
|
||||||
if os.path.exists(productid_path):
|
if os.path.exists(productid_path):
|
||||||
shutil.copy2(productid_path, new_path)
|
shutil.copy2(productid_path, new_path)
|
||||||
cmd = cr.get_modifyrepo_cmd(repo_dir, new_path, compress_type="gz")
|
cmd = cr.get_modifyrepo_cmd(
|
||||||
|
repo_dir, new_path, compress_type="gz"
|
||||||
|
)
|
||||||
run(cmd)
|
run(cmd)
|
||||||
else:
|
else:
|
||||||
print("WARNING: productid not found in {0}.{1}".format(variant, arch))
|
print(
|
||||||
|
"WARNING: productid not found in {0}.{1}".format(
|
||||||
|
variant, arch
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
print("Inserting new repomd.xml checksum to treeinfo: {0}.{1}".format(variant, arch))
|
print(
|
||||||
|
"Inserting new repomd.xml checksum to treeinfo: {0}.{1}".format(
|
||||||
|
variant, arch
|
||||||
|
)
|
||||||
|
)
|
||||||
# insert new repomd.xml checksum to treeinfo
|
# insert new repomd.xml checksum to treeinfo
|
||||||
repomd_path = os.path.join(repo_path, "repodata", "repomd.xml")
|
repomd_path = os.path.join(repo_path, "repodata", "repomd.xml")
|
||||||
ti.checksums.add(os.path.relpath(repomd_path, tree_dir), 'sha256', root_dir=tree_dir)
|
ti.checksums.add(
|
||||||
|
os.path.relpath(repomd_path, tree_dir), "sha256", root_dir=tree_dir
|
||||||
|
)
|
||||||
|
|
||||||
# write treeinfo
|
# write treeinfo
|
||||||
for arch, ti in self.treeinfo.items():
|
for arch, ti in self.treeinfo.items():
|
||||||
@ -270,17 +320,25 @@ class UnifiedISO(object):
|
|||||||
di_path = os.path.join(self.temp_dir, "trees", arch, ".discinfo")
|
di_path = os.path.join(self.temp_dir, "trees", arch, ".discinfo")
|
||||||
description = "%s %s" % (ti.release.name, ti.release.version)
|
description = "%s %s" % (ti.release.name, ti.release.version)
|
||||||
if ti.release.is_layered:
|
if ti.release.is_layered:
|
||||||
description += " for %s %s" % (ti.base_product.name, ti.base_product.version)
|
description += " for %s %s" % (
|
||||||
create_discinfo(di_path, description, arch.split('-', 1)[-1])
|
ti.base_product.name,
|
||||||
|
ti.base_product.version,
|
||||||
|
)
|
||||||
|
create_discinfo(di_path, description, arch.split("-", 1)[-1])
|
||||||
|
|
||||||
def read_config(self):
|
def read_config(self):
|
||||||
try:
|
try:
|
||||||
conf_dump = glob.glob(os.path.join(self.compose_path,
|
conf_dump = glob.glob(
|
||||||
'../logs/global/config-dump*.global.log'))[0]
|
os.path.join(
|
||||||
|
self.compose_path, "../logs/global/config-dump*.global.log"
|
||||||
|
)
|
||||||
|
)[0]
|
||||||
except IndexError:
|
except IndexError:
|
||||||
print('Config dump not found, can not adhere to previous settings. '
|
print(
|
||||||
'Expect weird naming and checksums.',
|
"Config dump not found, can not adhere to previous settings. "
|
||||||
file=sys.stderr)
|
"Expect weird naming and checksums.",
|
||||||
|
file=sys.stderr,
|
||||||
|
)
|
||||||
return {}
|
return {}
|
||||||
with open(conf_dump) as f:
|
with open(conf_dump) as f:
|
||||||
return json.load(f)
|
return json.load(f)
|
||||||
@ -291,8 +349,8 @@ class UnifiedISO(object):
|
|||||||
|
|
||||||
for typed_arch, ti in self.treeinfo.items():
|
for typed_arch, ti in self.treeinfo.items():
|
||||||
source_dir = os.path.join(self.temp_dir, "trees", typed_arch)
|
source_dir = os.path.join(self.temp_dir, "trees", typed_arch)
|
||||||
arch = typed_arch.split('-', 1)[-1]
|
arch = typed_arch.split("-", 1)[-1]
|
||||||
debuginfo = typed_arch.startswith('debug-')
|
debuginfo = typed_arch.startswith("debug-")
|
||||||
|
|
||||||
# XXX: HARDCODED
|
# XXX: HARDCODED
|
||||||
disc_type = "dvd"
|
disc_type = "dvd"
|
||||||
@ -301,7 +359,7 @@ class UnifiedISO(object):
|
|||||||
if arch == "src":
|
if arch == "src":
|
||||||
iso_arch = "source"
|
iso_arch = "source"
|
||||||
elif debuginfo:
|
elif debuginfo:
|
||||||
iso_arch = arch + '-debuginfo'
|
iso_arch = arch + "-debuginfo"
|
||||||
|
|
||||||
iso_name = "%s-%s-%s.iso" % (self.ci.compose.id, iso_arch, disc_type)
|
iso_name = "%s-%s-%s.iso" % (self.ci.compose.id, iso_arch, disc_type)
|
||||||
iso_dir = os.path.join(self.temp_dir, "iso", iso_arch)
|
iso_dir = os.path.join(self.temp_dir, "iso", iso_arch)
|
||||||
@ -315,7 +373,11 @@ class UnifiedISO(object):
|
|||||||
volid += " debuginfo"
|
volid += " debuginfo"
|
||||||
|
|
||||||
# create ISO
|
# create ISO
|
||||||
run(iso.get_mkisofs_cmd(iso_path, [source_dir], volid=volid, exclude=["./lost+found"]))
|
run(
|
||||||
|
iso.get_mkisofs_cmd(
|
||||||
|
iso_path, [source_dir], volid=volid, exclude=["./lost+found"]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
# implant MD5
|
# implant MD5
|
||||||
supported = True
|
supported = True
|
||||||
@ -332,7 +394,7 @@ class UnifiedISO(object):
|
|||||||
img.arch = arch
|
img.arch = arch
|
||||||
|
|
||||||
# XXX: HARDCODED
|
# XXX: HARDCODED
|
||||||
img.type = "dvd" if not debuginfo else 'dvd-debuginfo'
|
img.type = "dvd" if not debuginfo else "dvd-debuginfo"
|
||||||
img.format = "iso"
|
img.format = "iso"
|
||||||
img.disc_number = 1
|
img.disc_number = 1
|
||||||
img.disc_count = 1
|
img.disc_count = 1
|
||||||
@ -351,7 +413,7 @@ class UnifiedISO(object):
|
|||||||
all_arches = [arch]
|
all_arches = [arch]
|
||||||
|
|
||||||
for tree_arch in all_arches:
|
for tree_arch in all_arches:
|
||||||
if tree_arch.startswith('debug-'):
|
if tree_arch.startswith("debug-"):
|
||||||
continue
|
continue
|
||||||
ti = self.treeinfo[tree_arch]
|
ti = self.treeinfo[tree_arch]
|
||||||
for variant_uid in ti.variants:
|
for variant_uid in ti.variants:
|
||||||
@ -366,49 +428,51 @@ class UnifiedISO(object):
|
|||||||
for var in self.ci.get_variants(recursive=False)
|
for var in self.ci.get_variants(recursive=False)
|
||||||
if var.uid != variant_uid
|
if var.uid != variant_uid
|
||||||
]
|
]
|
||||||
paths_attr = 'isos' if arch != 'src' else 'source_isos'
|
paths_attr = "isos" if arch != "src" else "source_isos"
|
||||||
paths = getattr(self.ci.variants[variant.uid].paths, paths_attr)
|
paths = getattr(self.ci.variants[variant.uid].paths, paths_attr)
|
||||||
path = paths.get(tree_arch, os.path.join(variant.uid, tree_arch, "iso"))
|
path = paths.get(
|
||||||
if variant_img.type == 'dvd-debuginfo':
|
tree_arch, os.path.join(variant.uid, tree_arch, "iso")
|
||||||
prefix, isodir = path.rsplit('/', 1)
|
|
||||||
path = os.path.join(prefix, 'debug', isodir)
|
|
||||||
variant_img.path = os.path.join(
|
|
||||||
path,
|
|
||||||
os.path.basename(img.path)
|
|
||||||
)
|
)
|
||||||
|
if variant_img.type == "dvd-debuginfo":
|
||||||
|
prefix, isodir = path.rsplit("/", 1)
|
||||||
|
path = os.path.join(prefix, "debug", isodir)
|
||||||
|
variant_img.path = os.path.join(path, os.path.basename(img.path))
|
||||||
im.add(variant.uid, tree_arch, variant_img)
|
im.add(variant.uid, tree_arch, variant_img)
|
||||||
|
|
||||||
dst = os.path.join(self.compose_path, variant_img.path)
|
dst = os.path.join(self.compose_path, variant_img.path)
|
||||||
print("Linking {0} -> {1}".format(iso_path, dst))
|
print("Linking {0} -> {1}".format(iso_path, dst))
|
||||||
makedirs(os.path.dirname(dst))
|
makedirs(os.path.dirname(dst))
|
||||||
self.linker.link(iso_path, dst)
|
self.linker.link(iso_path, dst)
|
||||||
self.linker.link(iso_path + '.manifest', dst + '.manifest')
|
self.linker.link(iso_path + ".manifest", dst + ".manifest")
|
||||||
|
|
||||||
def _get_base_filename(self, variant, arch):
|
def _get_base_filename(self, variant, arch):
|
||||||
substs = {
|
substs = {
|
||||||
'compose_id': self.compose.info.compose.id,
|
"compose_id": self.compose.info.compose.id,
|
||||||
'release_short': self.compose.info.release.short,
|
"release_short": self.compose.info.release.short,
|
||||||
'version': self.compose.info.release.version,
|
"version": self.compose.info.release.version,
|
||||||
'date': self.compose.info.compose.date,
|
"date": self.compose.info.compose.date,
|
||||||
'respin': self.compose.info.compose.respin,
|
"respin": self.compose.info.compose.respin,
|
||||||
'type': self.compose.info.compose.type,
|
"type": self.compose.info.compose.type,
|
||||||
'type_suffix': self.compose.info.compose.type_suffix,
|
"type_suffix": self.compose.info.compose.type_suffix,
|
||||||
'label': self.compose.info.compose.label,
|
"label": self.compose.info.compose.label,
|
||||||
'label_major_version': self.compose.info.compose.label_major_version,
|
"label_major_version": self.compose.info.compose.label_major_version,
|
||||||
'variant': variant,
|
"variant": variant,
|
||||||
'arch': arch,
|
"arch": arch,
|
||||||
}
|
}
|
||||||
base_name = self.conf.get('media_checksum_base_filename', '')
|
base_name = self.conf.get("media_checksum_base_filename", "")
|
||||||
if base_name:
|
if base_name:
|
||||||
base_name = (base_name % substs).format(**substs)
|
base_name = (base_name % substs).format(**substs)
|
||||||
base_name += '-'
|
base_name += "-"
|
||||||
return base_name
|
return base_name
|
||||||
|
|
||||||
def update_checksums(self):
|
def update_checksums(self):
|
||||||
make_checksums(self.compose_path, self.get_image_manifest(),
|
make_checksums(
|
||||||
self.conf.get('media_checksums', DEFAULT_CHECKSUMS),
|
self.compose_path,
|
||||||
self.conf.get('media_checksum_one_file', False),
|
self.get_image_manifest(),
|
||||||
self._get_base_filename)
|
self.conf.get("media_checksums", DEFAULT_CHECKSUMS),
|
||||||
|
self.conf.get("media_checksum_one_file", False),
|
||||||
|
self._get_base_filename,
|
||||||
|
)
|
||||||
|
|
||||||
def get_image_manifest(self):
|
def get_image_manifest(self):
|
||||||
if not self.images:
|
if not self.images:
|
||||||
|
82 setup.py
@ -24,61 +24,47 @@ packages = sorted(packages)
|
|||||||
|
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
name = "pungi",
|
name="pungi",
|
||||||
version = "4.2.0",
|
version="4.2.0",
|
||||||
description = "Distribution compose tool",
|
description="Distribution compose tool",
|
||||||
url = "https://pagure.io/pungi",
|
url="https://pagure.io/pungi",
|
||||||
author = "Dennis Gilmore",
|
author="Dennis Gilmore",
|
||||||
author_email = "dgilmore@fedoraproject.org",
|
author_email="dgilmore@fedoraproject.org",
|
||||||
license = "GPLv2",
|
license="GPLv2",
|
||||||
|
packages=packages,
|
||||||
packages = packages,
|
entry_points={
|
||||||
entry_points = {
|
"console_scripts": [
|
||||||
'console_scripts': [
|
"comps_filter = pungi.scripts.comps_filter:main",
|
||||||
'comps_filter = pungi.scripts.comps_filter:main',
|
"pungi = pungi.scripts.pungi:main",
|
||||||
'pungi = pungi.scripts.pungi:main',
|
"pungi-create-unified-isos = pungi.scripts.create_unified_isos:main",
|
||||||
'pungi-create-unified-isos = pungi.scripts.create_unified_isos:main',
|
"pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main",
|
||||||
'pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main',
|
"pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
|
||||||
'pungi-patch-iso = pungi.scripts.patch_iso:cli_main',
|
"pungi-make-ostree = pungi.ostree:main",
|
||||||
'pungi-make-ostree = pungi.ostree:main',
|
"pungi-notification-report-progress = pungi.scripts.report_progress:main",
|
||||||
'pungi-notification-report-progress = pungi.scripts.report_progress:main',
|
"pungi-orchestrate = pungi_utils.orchestrator:main",
|
||||||
'pungi-orchestrate = pungi_utils.orchestrator:main',
|
"pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main",
|
||||||
'pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main',
|
"pungi-koji = pungi.scripts.pungi_koji:cli_main",
|
||||||
'pungi-koji = pungi.scripts.pungi_koji:cli_main',
|
"pungi-gather = pungi.scripts.pungi_gather:cli_main",
|
||||||
'pungi-gather = pungi.scripts.pungi_gather:cli_main',
|
"pungi-config-dump = pungi.scripts.config_dump:cli_main",
|
||||||
'pungi-config-dump = pungi.scripts.config_dump:cli_main',
|
"pungi-config-validate = pungi.scripts.config_validate:cli_main",
|
||||||
'pungi-config-validate = pungi.scripts.config_validate:cli_main',
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
scripts = [
|
scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving",],
|
||||||
'contrib/yum-dnf-compare/pungi-compare-depsolving',
|
data_files=[
|
||||||
|
("/usr/share/pungi", glob.glob("share/*.xsl")),
|
||||||
|
("/usr/share/pungi", glob.glob("share/*.ks")),
|
||||||
|
("/usr/share/pungi", glob.glob("share/*.dtd")),
|
||||||
|
("/usr/share/pungi/multilib", glob.glob("share/multilib/*")),
|
||||||
],
|
],
|
||||||
data_files = [
|
test_suite="tests",
|
||||||
('/usr/share/pungi', glob.glob('share/*.xsl')),
|
install_requires=[
|
||||||
('/usr/share/pungi', glob.glob('share/*.ks')),
|
|
||||||
('/usr/share/pungi', glob.glob('share/*.dtd')),
|
|
||||||
('/usr/share/pungi/multilib', glob.glob('share/multilib/*')),
|
|
||||||
],
|
|
||||||
test_suite = "tests",
|
|
||||||
install_requires = [
|
|
||||||
"jsonschema",
|
"jsonschema",
|
||||||
"kobo",
|
"kobo",
|
||||||
"lxml",
|
"lxml",
|
||||||
"productmd>=1.23",
|
"productmd>=1.23",
|
||||||
"six",
|
"six",
|
||||||
'dogpile.cache',
|
"dogpile.cache",
|
||||||
],
|
|
||||||
extras_require={
|
|
||||||
':python_version=="2.7"': [
|
|
||||||
'enum34',
|
|
||||||
"lockfile",
|
|
||||||
'dict.sorted',
|
|
||||||
]
|
|
||||||
},
|
|
||||||
tests_require = [
|
|
||||||
"black",
|
|
||||||
"mock",
|
|
||||||
"nose",
|
|
||||||
"nose-cov",
|
|
||||||
],
|
],
|
||||||
|
extras_require={':python_version=="2.7"': ["enum34", "lockfile", "dict.sorted",]},
|
||||||
|
tests_require=["mock", "nose", "nose-cov",],
|
||||||
)
|
)
|
||||||
|
4 tox.ini
@ -1,15 +1,15 @@
|
|||||||
[flake8]
|
[flake8]
|
||||||
exclude = doc/*,*.pyc,*.py~,*.in,*.spec,*.sh,*.rst,setup.py
|
exclude = doc/*,*.pyc,*.py~,*.in,*.spec,*.sh,*.rst,setup.py
|
||||||
filename = *.py
|
filename = *.py
|
||||||
|
max-line-length = 88
|
||||||
|
|
||||||
# E402: module level import not at top of file
|
# E402: module level import not at top of file
|
||||||
# E501: line too long
|
|
||||||
# H301: one import per line
|
# H301: one import per line
|
||||||
# H306: imports not in alphabetical order
|
# H306: imports not in alphabetical order
|
||||||
# E226: missing whitespace around arithmetic operator
|
# E226: missing whitespace around arithmetic operator
|
||||||
# W503: line break occured before a binary operator
|
# W503: line break occured before a binary operator
|
||||||
# E203: whitespace before ':'
|
# E203: whitespace before ':'
|
||||||
ignore = E501,E402,H301,H306,E226,W503,E203
|
ignore = E402,H301,H306,E226,W503,E203
|
||||||
|
|
||||||
[run]
|
[run]
|
||||||
omit = tests/*
|
omit = tests/*
|
||||||
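
The hunks above all follow black's defaults, which the tox.ini change mirrors: strings are normalized to double quotes, the line limit is 88 characters (hence the new max-line-length = 88 and the re-enabling of E501), and a call that no longer fits is reflowed, falling back to one argument per line with a trailing comma when the arguments will not share a single continuation line. A minimal sketch with hypothetical names, not code from this commit:

# Hypothetical names for illustration only; this is not Pungi code.
def fetch(compose_id, target_directory, file_pattern, logger, download_artifacts):
    # Hand-wrapped form before black (flake8 previously ignored E501 entirely):
    #     return download_artifacts(compose_id=compose_id,
    #                               target_directory=target_directory,
    #                               file_pattern=file_pattern, logger=logger)
    # Black's output: the keyword arguments cannot share one 88-character
    # continuation line, so each gets its own line and a trailing comma.
    return download_artifacts(
        compose_id=compose_id,
        target_directory=target_directory,
        file_pattern=file_pattern,
        logger=logger,
    )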