Format code base with black

https://black.readthedocs.io/en/stable/

JIRA: COMPOSE-4086
Signed-off-by: Haibo Lin <hlin@redhat.com>
Branch: master
Author: Haibo Lin
Parent: 38142d30ba
Commit: 41a629969c
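The change itself is mechanical: black rewrites layout only. It is typically applied with "black ." and gated in CI with "black --check". black also ships a small Python API; a minimal sketch of formatting one statement with it (black.format_str and black.Mode are part of black's documented API; the sample string is hypothetical, not taken from this commit):

    # Reformat a single source string using black's library API.
    import black

    src = "paths = {'a': 1, 'b': 2,}"
    # black.Mode() carries the defaults visible throughout this diff:
    # 88-character lines, double-quoted strings, magic trailing commas.
    print(black.format_str(src, mode=black.Mode()))
    # Prints the dict re-quoted with double quotes, one item per line.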

@@ -9,15 +9,20 @@ def get_full_version():
Find full version of Pungi: if running from git, this will return cleaned
output of `git describe`, otherwise it will look for installed version.
"""
location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
if os.path.isdir(os.path.join(location, '.git')):
location = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
if os.path.isdir(os.path.join(location, ".git")):
import subprocess
proc = subprocess.Popen(['git', '--git-dir=%s/.git' % location, 'describe', '--tags'],
stdout=subprocess.PIPE, universal_newlines=True)
proc = subprocess.Popen(
["git", "--git-dir=%s/.git" % location, "describe", "--tags"],
stdout=subprocess.PIPE,
universal_newlines=True,
)
output, _ = proc.communicate()
return re.sub(r'-1.fc\d\d?', '', output.strip().replace('pungi-', ''))
return re.sub(r"-1.fc\d\d?", "", output.strip().replace("pungi-", ""))
else:
import subprocess
proc = subprocess.Popen(
["rpm", "-q", "pungi"], stdout=subprocess.PIPE, universal_newlines=True
)
@@ -25,4 +30,4 @@ def get_full_version():
if not err:
return output.rstrip()
else:
return 'unknown'
return "unknown"

@@ -93,14 +93,18 @@ def split_name_arch(name_arch):
def is_excluded(package, arches, logger=None):
"""Check if package is excluded from given architectures."""
if (package.excludearch and set(package.excludearch) & set(arches)):
if package.excludearch and set(package.excludearch) & set(arches):
if logger:
logger.debug("Excluding (EXCLUDEARCH: %s): %s"
% (sorted(set(package.excludearch)), package.file_name))
logger.debug(
"Excluding (EXCLUDEARCH: %s): %s"
% (sorted(set(package.excludearch)), package.file_name)
)
return True
if (package.exclusivearch and not (set(package.exclusivearch) & set(arches))):
if package.exclusivearch and not (set(package.exclusivearch) & set(arches)):
if logger:
logger.debug("Excluding (EXCLUSIVEARCH: %s): %s"
% (sorted(set(package.exclusivearch)), package.file_name))
logger.debug(
"Excluding (EXCLUSIVEARCH: %s): %s"
% (sorted(set(package.exclusivearch)), package.file_name)
)
return True
return False

@@ -12,12 +12,13 @@ import struct
_ppc64_native_is_best = True
# dict mapping arch -> ( multicompat, best personality, biarch personality )
multilibArches = {"x86_64": ("athlon", "x86_64", "athlon"),
"sparc64v": ("sparcv9v", "sparcv9v", "sparc64v"),
"sparc64": ("sparcv9", "sparcv9", "sparc64"),
"ppc64": ("ppc", "ppc", "ppc64"),
"s390x": ("s390", "s390x", "s390"),
}
multilibArches = {
"x86_64": ("athlon", "x86_64", "athlon"),
"sparc64v": ("sparcv9v", "sparcv9v", "sparc64v"),
"sparc64": ("sparcv9", "sparcv9", "sparc64"),
"ppc64": ("ppc", "ppc", "ppc64"),
"s390x": ("s390", "s390x", "s390"),
}
if _ppc64_native_is_best:
multilibArches["ppc64"] = ("ppc", "ppc64", "ppc64")
@@ -29,26 +30,21 @@ arches = {
"i586": "i486",
"i486": "i386",
"i386": "noarch",
# amd64
"x86_64": "athlon",
"amd64": "x86_64",
"ia32e": "x86_64",
# ppc64le
"ppc64le": "noarch",
# ppc
"ppc64p7": "ppc64",
"ppc64pseries": "ppc64",
"ppc64iseries": "ppc64",
"ppc64": "ppc",
"ppc": "noarch",
# s390{,x}
"s390x": "s390",
"s390": "noarch",
# sparc
"sparc64v": "sparcv9v",
"sparc64": "sparcv9",
@@ -56,7 +52,6 @@ arches = {
"sparcv9": "sparcv8",
"sparcv8": "sparc",
"sparc": "noarch",
# alpha
"alphaev7": "alphaev68",
"alphaev68": "alphaev67",
@@ -68,29 +63,23 @@ arches = {
"alphaev45": "alphaev4",
"alphaev4": "alpha",
"alpha": "noarch",
# arm
"armv7l": "armv6l",
"armv6l": "armv5tejl",
"armv5tejl": "armv5tel",
"armv5tel": "noarch",
# arm hardware floating point
"armv7hnl": "armv7hl",
"armv7hl": "armv6hl",
"armv6hl": "noarch",
# arm64
"arm64": "noarch",
# aarch64
"aarch64": "noarch",
# super-h
"sh4a": "sh4",
"sh4": "noarch",
"sh3": "noarch",
# itanium
"ia64": "noarch",
}
@@ -137,7 +126,7 @@ def getArchList(thisarch=None): # pragma: no cover
# if we're a weirdo arch - add noarch on there.
if len(archlist) == 1 and archlist[0] == thisarch:
archlist.append('noarch')
archlist.append("noarch")
return archlist
@@ -208,10 +197,10 @@ def getCanonX86Arch(arch): # pragma: no cover
def getCanonARMArch(arch): # pragma: no cover
# the %{_target_arch} macro in rpm will let us know the abi we are using
target = rpm.expandMacro('%{_target_cpu}')
if target.startswith('armv6h'):
target = rpm.expandMacro("%{_target_cpu}")
if target.startswith("armv6h"):
return target
if target.startswith('armv7h'):
if target.startswith("armv7h"):
return target
return arch
@@ -224,7 +213,7 @@ def getCanonPPCArch(arch): # pragma: no cover
machine = None
for line in _try_read_cpuinfo():
if line.find("machine") != -1:
machine = line.split(':')[1]
machine = line.split(":")[1]
break
platform = _aux_vector["platform"]
@@ -232,7 +221,7 @@ def getCanonPPCArch(arch): # pragma: no cover
return arch
try:
if platform.startswith("power") and int(platform[5:].rstrip('+')) >= 7:
if platform.startswith("power") and int(platform[5:].rstrip("+")) >= 7:
return "ppc64p7"
except:
pass
@@ -252,7 +241,7 @@ def getCanonSPARCArch(arch): # pragma: no cover
SPARCtype = None
for line in _try_read_cpuinfo():
if line.startswith("type"):
SPARCtype = line.split(':')[1]
SPARCtype = line.split(":")[1]
break
if SPARCtype is None:
return arch
@@ -279,7 +268,7 @@ def getCanonX86_64Arch(arch): # pragma: no cover
vendor = None
for line in _try_read_cpuinfo():
if line.startswith("vendor_id"):
vendor = line.split(':')[1]
vendor = line.split(":")[1]
break
if vendor is None:
return arch
@@ -308,7 +297,7 @@ def getCanonArch(skipRpmPlatform=0): # pragma: no cover
_parse_auxv()
if (len(arch) == 4 and arch[0] == "i" and arch[2:4] == "86"):
if len(arch) == 4 and arch[0] == "i" and arch[2:4] == "86":
return getCanonX86Arch(arch)
if arch.startswith("arm"):
@@ -370,7 +359,7 @@ def getBaseArch(myarch=None): # pragma: no cover
if myarch in arches:
basearch = myarch
value = arches[basearch]
while value != 'noarch':
while value != "noarch":
basearch = value
value = arches[basearch]

File diff suppressed because it is too large.
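
Since black changes layout but never semantics, a reformat commit of this size can be verified mechanically: the old and new sources must parse to the same AST (black runs an equivalence check of this kind on every file it rewrites). A minimal sketch of such a check, with a hypothetical helper and sample strings:

    # Verify that a rewrite changed formatting only, not behavior.
    import ast

    def same_code(old_src, new_src):
        # Identical ASTs (positions are ignored by default) imply
        # identical runtime behavior.
        return ast.dump(ast.parse(old_src)) == ast.dump(ast.parse(new_src))

    old = "x = {'a': 1}"
    new = 'x = {"a": 1}\n'
    print(same_code(old, new))  # True: only the quoting differs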

@@ -15,7 +15,6 @@
class OptionsBase(object):
def __init__(self, **kwargs):
"""
inherit and initialize attributes
@@ -29,5 +28,7 @@ class OptionsBase(object):
"""
for key, value in kwargs.items():
if not hasattr(self, key):
raise ValueError("Invalid option in %s: %s" % (self.__class__.__name__, key))
raise ValueError(
"Invalid option in %s: %s" % (self.__class__.__name__, key)
)
setattr(self, key, value)

@@ -14,9 +14,7 @@
# along with this program; if not, see <https://gnu.org/licenses/>.
__all__ = (
"Compose",
)
__all__ = ("Compose",)
import errno
@@ -38,7 +36,10 @@ from pungi.wrappers.variants import VariantsXmlParser
from pungi.paths import Paths
from pungi.wrappers.scm import get_file_from_scm
from pungi.util import (
makedirs, get_arch_variant_data, get_format_substs, get_variant_data
makedirs,
get_arch_variant_data,
get_format_substs,
get_variant_data,
)
from pungi.metadata import compose_to_composeinfo
@@ -50,7 +51,15 @@ except ImportError:
SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]
def get_compose_dir(topdir, conf, compose_type="production", compose_date=None, compose_respin=None, compose_label=None, already_exists_callbacks=None):
def get_compose_dir(
topdir,
conf,
compose_type="production",
compose_date=None,
compose_respin=None,
compose_label=None,
already_exists_callbacks=None,
):
already_exists_callbacks = already_exists_callbacks or []
# create an incomplete composeinfo to generate compose ID
@@ -107,7 +116,18 @@ def get_compose_dir(topdir, conf, compose_type="production", compose_date=None,
class Compose(kobo.log.LoggingBase):
def __init__(self, conf, topdir, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None, notifier=None):
def __init__(
self,
conf,
topdir,
skip_phases=None,
just_phases=None,
old_composes=None,
koji_event=None,
supported=False,
logger=None,
notifier=None,
):
kobo.log.LoggingBase.__init__(self, logger)
# TODO: check if minimal conf values are set
self.conf = conf
@@ -128,18 +148,27 @@ class Compose(kobo.log.LoggingBase):
# Set up logging to file
if logger:
kobo.log.add_file_logger(logger, self.paths.log.log_file("global", "pungi.log"))
kobo.log.add_file_logger(logger, self.paths.log.log_file("global", "excluding-arch.log"))
kobo.log.add_file_logger(
logger, self.paths.log.log_file("global", "pungi.log")
)
kobo.log.add_file_logger(
logger, self.paths.log.log_file("global", "excluding-arch.log")
)
class PungiLogFilter(logging.Filter):
def filter(self, record):
return False if record.funcName and record.funcName == 'is_excluded' else True
return (
False
if record.funcName and record.funcName == "is_excluded"
else True
)
class ExcludingArchLogFilter(logging.Filter):
def filter(self, record):
message = record.getMessage()
if 'Populating package set for arch:' in message or \
(record.funcName and record.funcName == 'is_excluded'):
if "Populating package set for arch:" in message or (
record.funcName and record.funcName == "is_excluded"
):
return True
else:
return False
@@ -147,18 +176,26 @@ class Compose(kobo.log.LoggingBase):
for handler in logger.handlers:
if isinstance(handler, logging.FileHandler):
log_file_name = os.path.basename(handler.stream.name)
if log_file_name == 'pungi.global.log':
if log_file_name == "pungi.global.log":
handler.addFilter(PungiLogFilter())
elif log_file_name == 'excluding-arch.global.log':
elif log_file_name == "excluding-arch.global.log":
handler.addFilter(ExcludingArchLogFilter())
# to provide compose_id, compose_date and compose_respin
self.ci_base = ComposeInfo()
self.ci_base.load(os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json"))
self.ci_base.load(
os.path.join(self.paths.work.topdir(arch="global"), "composeinfo-base.json")
)
self.supported = supported
if self.compose_label and self.compose_label.split("-")[0] in SUPPORTED_MILESTONES:
self.log_info("Automatically setting 'supported' flag due to label: %s." % self.compose_label)
if (
self.compose_label
and self.compose_label.split("-")[0] in SUPPORTED_MILESTONES
):
self.log_info(
"Automatically setting 'supported' flag due to label: %s."
% self.compose_label
)
self.supported = True
self.im = Images()
@@ -179,10 +216,10 @@ class Compose(kobo.log.LoggingBase):
self.cache_region = make_region().configure(
self.conf.get("dogpile_cache_backend"),
expiration_time=self.conf.get("dogpile_cache_expiration_time", 3600),
arguments=self.conf.get("dogpile_cache_arguments", {})
arguments=self.conf.get("dogpile_cache_arguments", {}),
)
else:
self.cache_region = make_region().configure('dogpile.cache.null')
self.cache_region = make_region().configure("dogpile.cache.null")
get_compose_dir = staticmethod(get_compose_dir)
@@ -234,10 +271,10 @@ class Compose(kobo.log.LoggingBase):
"""Explicit configuration trumps all. Otherwise check gather backend
and only create it for Yum.
"""
config = self.conf.get('createrepo_database')
config = self.conf.get("createrepo_database")
if config is not None:
return config
return self.conf['gather_backend'] == 'yum'
return self.conf["gather_backend"] == "yum"
def read_variants(self):
# TODO: move to phases/init ?
@@ -263,7 +300,9 @@ class Compose(kobo.log.LoggingBase):
tree_arches = self.conf.get("tree_arches", None)
tree_variants = self.conf.get("tree_variants", None)
with open(variants_file, "r") as file_obj:
parser = VariantsXmlParser(file_obj, tree_arches, tree_variants, logger=self._logger)
parser = VariantsXmlParser(
file_obj, tree_arches, tree_variants, logger=self._logger
)
self.variants = parser.parse()
self.all_variants = {}
@@ -294,21 +333,28 @@ class Compose(kobo.log.LoggingBase):
@property
def status_file(self):
"""Path to file where the compose status will be stored."""
if not hasattr(self, '_status_file'):
self._status_file = os.path.join(self.topdir, 'STATUS')
if not hasattr(self, "_status_file"):
self._status_file = os.path.join(self.topdir, "STATUS")
return self._status_file
def _log_failed_deliverables(self):
for kind, data in self.failed_deliverables.items():
for variant, arch, subvariant in data:
self.log_info('Failed %s on variant <%s>, arch <%s>, subvariant <%s>.'
% (kind, variant, arch, subvariant))
log = os.path.join(self.paths.log.topdir('global'), 'deliverables.json')
with open(log, 'w') as f:
json.dump({'required': self.required_deliverables,
'failed': self.failed_deliverables,
'attempted': self.attempted_deliverables},
f, indent=4)
self.log_info(
"Failed %s on variant <%s>, arch <%s>, subvariant <%s>."
% (kind, variant, arch, subvariant)
)
log = os.path.join(self.paths.log.topdir("global"), "deliverables.json")
with open(log, "w") as f:
json.dump(
{
"required": self.required_deliverables,
"failed": self.failed_deliverables,
"attempted": self.attempted_deliverables,
},
f,
indent=4,
)
def write_status(self, stat_msg):
if stat_msg not in ("STARTED", "FINISHED", "DOOMED", "TERMINATED"):
@@ -321,8 +367,8 @@ class Compose(kobo.log.LoggingBase):
self.log_error(msg)
raise RuntimeError(msg)
if stat_msg == 'FINISHED' and self.failed_deliverables:
stat_msg = 'FINISHED_INCOMPLETE'
if stat_msg == "FINISHED" and self.failed_deliverables:
stat_msg = "FINISHED_INCOMPLETE"
self._log_failed_deliverables()
@@ -330,21 +376,22 @@ class Compose(kobo.log.LoggingBase):
f.write(stat_msg + "\n")
if self.notifier:
self.notifier.send('status-change', status=stat_msg)
self.notifier.send("status-change", status=stat_msg)
def get_status(self):
if not os.path.isfile(self.status_file):
return
return open(self.status_file, "r").read().strip()
def get_image_name(self, arch, variant, disc_type='dvd',
disc_num=1, suffix='.iso', format=None):
def get_image_name(
self, arch, variant, disc_type="dvd", disc_num=1, suffix=".iso", format=None
):
"""Create a filename for image with given parameters.
:raises RuntimeError: when unknown ``disc_type`` is given
"""
default_format = "{compose_id}-{variant}-{arch}-{disc_type}{disc_num}{suffix}"
format = format or self.conf.get('image_name_format', default_format)
format = format or self.conf.get("image_name_format", default_format)
if isinstance(format, dict):
conf = get_variant_data(self.conf, "image_name_format", variant)
@@ -359,47 +406,54 @@ class Compose(kobo.log.LoggingBase):
disc_num = ""
kwargs = {
'arch': arch,
'disc_type': disc_type,
'disc_num': disc_num,
'suffix': suffix
"arch": arch,
"disc_type": disc_type,
"disc_num": disc_num,
"suffix": suffix,
}
if variant.type == "layered-product":
variant_uid = variant.parent.uid
kwargs['compose_id'] = self.ci_base[variant.uid].compose_id
kwargs["compose_id"] = self.ci_base[variant.uid].compose_id
else:
variant_uid = variant.uid
args = get_format_substs(self, variant=variant_uid, **kwargs)
try:
return (format % args).format(**args)
except KeyError as err:
raise RuntimeError('Failed to create image name: unknown format element: %s' % err)
raise RuntimeError(
"Failed to create image name: unknown format element: %s" % err
)
def can_fail(self, variant, arch, deliverable):
"""Figure out if deliverable can fail on variant.arch.
Variant can be None.
"""
failable = get_arch_variant_data(self.conf, 'failable_deliverables', arch, variant)
failable = get_arch_variant_data(
self.conf, "failable_deliverables", arch, variant
)
return deliverable in failable
def attempt_deliverable(self, variant, arch, kind, subvariant=None):
"""Log information about attempted deliverable."""
variant_uid = variant.uid if variant else ''
variant_uid = variant.uid if variant else ""
self.attempted_deliverables.setdefault(kind, []).append(
(variant_uid, arch, subvariant))
(variant_uid, arch, subvariant)
)
def require_deliverable(self, variant, arch, kind, subvariant=None):
"""Log information about attempted deliverable."""
variant_uid = variant.uid if variant else ''
variant_uid = variant.uid if variant else ""
self.required_deliverables.setdefault(kind, []).append(
(variant_uid, arch, subvariant))
(variant_uid, arch, subvariant)
)
def fail_deliverable(self, variant, arch, kind, subvariant=None):
"""Log information about failed deliverable."""
variant_uid = variant.uid if variant else ''
variant_uid = variant.uid if variant else ""
self.failed_deliverables.setdefault(kind, []).append(
(variant_uid, arch, subvariant))
(variant_uid, arch, subvariant)
)
@property
def image_release(self):
@@ -409,11 +463,14 @@ class Compose(kobo.log.LoggingBase):
otherwise we will create a string with date, compose type and respin.
"""
if self.compose_label:
milestone, release = self.compose_label.split('-')
milestone, release = self.compose_label.split("-")
return release
return '%s%s.%s' % (self.compose_date, self.ci_base.compose.type_suffix,
self.compose_respin)
return "%s%s.%s" % (
self.compose_date,
self.ci_base.compose.type_suffix,
self.compose_respin,
)
@property
def image_version(self):
@@ -423,9 +480,9 @@ class Compose(kobo.log.LoggingBase):
milestone from it is appended to the version (unless it is RC).
"""
version = self.ci_base.release.version
if self.compose_label and not self.compose_label.startswith('RC-'):
milestone, release = self.compose_label.split('-')
return '%s_%s' % (version, milestone)
if self.compose_label and not self.compose_label.startswith("RC-"):
milestone, release = self.compose_label.split("-")
return "%s_%s" % (version, milestone)
return version
@@ -451,7 +508,7 @@ def get_ordered_variant_uids(compose):
setattr(
compose,
"_ordered_variant_uids",
unordered_variant_uids + ordered_variant_uids
unordered_variant_uids + ordered_variant_uids,
)
return getattr(compose, "_ordered_variant_uids")
@@ -469,7 +526,9 @@ def _prepare_variant_as_lookaside(compose):
try:
graph.add_edge(variant, lookaside_variant)
except ValueError as e:
raise ValueError("There is a bad configuration in 'variant_as_lookaside': %s" % e)
raise ValueError(
"There is a bad configuration in 'variant_as_lookaside': %s" % e
)
variant_processing_order = reversed(graph.prune_graph())
return list(variant_processing_order)

@@ -42,9 +42,12 @@ def write_discinfo(file_path, description, arch, disc_numbers=None, timestamp=No
"""
disc_numbers = disc_numbers or ["ALL"]
if not isinstance(disc_numbers, list):
raise TypeError("Invalid type: disc_numbers type is %s; expected: <list>" % type(disc_numbers))
raise TypeError(
"Invalid type: disc_numbers type is %s; expected: <list>"
% type(disc_numbers)
)
if not timestamp:
timestamp = os.environ.get('SOURCE_DATE_EPOCH', "%f" % time.time())
timestamp = os.environ.get("SOURCE_DATE_EPOCH", "%f" % time.time())
with open(file_path, "w") as f:
f.write("%s\n" % timestamp)
f.write("%s\n" % description)

@@ -21,51 +21,58 @@ import time
from ConfigParser import SafeConfigParser
from .arch_utils import getBaseArch
# In development, `here` will point to the bin/ directory with scripts.
here = sys.path[0]
MULTILIBCONF = (os.path.join(os.path.dirname(__file__), '..', 'share', 'multilib')
if here != '/usr/bin'
else '/usr/share/pungi/multilib')
MULTILIBCONF = (
os.path.join(os.path.dirname(__file__), "..", "share", "multilib")
if here != "/usr/bin"
else "/usr/share/pungi/multilib"
)
class Config(SafeConfigParser):
def __init__(self, pungirc=None):
SafeConfigParser.__init__(self)
self.add_section('pungi')
self.add_section('lorax')
self.add_section("pungi")
self.add_section("lorax")
self.set('pungi', 'osdir', 'os')
self.set('pungi', 'sourcedir', 'source')
self.set('pungi', 'debugdir', 'debug')
self.set('pungi', 'isodir', 'iso')
self.set('pungi', 'multilibconf', MULTILIBCONF)
self.set('pungi', 'relnotefilere', 'LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG')
self.set('pungi', 'relnotedirre', '')
self.set('pungi', 'relnotepkgs', 'fedora-repos fedora-release fedora-release-notes')
self.set('pungi', 'product_path', 'Packages')
self.set('pungi', 'cachedir', '/var/cache/pungi')
self.set('pungi', 'compress_type', 'xz')
self.set('pungi', 'arch', getBaseArch())
self.set('pungi', 'family', 'Fedora')
self.set('pungi', 'iso_basename', 'Fedora')
self.set('pungi', 'version', time.strftime('%Y%m%d', time.localtime()))
self.set('pungi', 'variant', '')
self.set('pungi', 'destdir', os.getcwd())
self.set('pungi', 'workdirbase', "/work")
self.set('pungi', 'bugurl', 'https://bugzilla.redhat.com')
self.set('pungi', 'cdsize', '695.0')
self.set('pungi', 'debuginfo', "True")
self.set('pungi', 'alldeps', "True")
self.set('pungi', 'isfinal', "False")
self.set('pungi', 'nohash', "False")
self.set('pungi', 'full_archlist', "False")
self.set('pungi', 'multilib', '')
self.set('pungi', 'lookaside_repos', '')
self.set('pungi', 'resolve_deps', "True")
self.set('pungi', 'no_dvd', "False")
self.set('pungi', 'nomacboot', "False")
self.set('pungi', 'rootfs_size', "False")
self.set("pungi", "osdir", "os")
self.set("pungi", "sourcedir", "source")
self.set("pungi", "debugdir", "debug")
self.set("pungi", "isodir", "iso")
self.set("pungi", "multilibconf", MULTILIBCONF)
self.set(
"pungi", "relnotefilere", "LICENSE README-BURNING-ISOS-en_US.txt ^RPM-GPG"
)
self.set("pungi", "relnotedirre", "")
self.set(
"pungi", "relnotepkgs", "fedora-repos fedora-release fedora-release-notes"
)
self.set("pungi", "product_path", "Packages")
self.set("pungi", "cachedir", "/var/cache/pungi")
self.set("pungi", "compress_type", "xz")
self.set("pungi", "arch", getBaseArch())
self.set("pungi", "family", "Fedora")
self.set("pungi", "iso_basename", "Fedora")
self.set("pungi", "version", time.strftime("%Y%m%d", time.localtime()))
self.set("pungi", "variant", "")
self.set("pungi", "destdir", os.getcwd())
self.set("pungi", "workdirbase", "/work")
self.set("pungi", "bugurl", "https://bugzilla.redhat.com")
self.set("pungi", "cdsize", "695.0")
self.set("pungi", "debuginfo", "True")
self.set("pungi", "alldeps", "True")
self.set("pungi", "isfinal", "False")
self.set("pungi", "nohash", "False")
self.set("pungi", "full_archlist", "False")
self.set("pungi", "multilib", "")
self.set("pungi", "lookaside_repos", "")
self.set("pungi", "resolve_deps", "True")
self.set("pungi", "no_dvd", "False")
self.set("pungi", "nomacboot", "False")
self.set("pungi", "rootfs_size", "False")
# if missing, self.read() is a noop, else change 'defaults'
if pungirc:

@@ -11,10 +11,21 @@ from .wrappers import iso
from .wrappers.jigdo import JigdoWrapper
CreateIsoOpts = namedtuple('CreateIsoOpts',
['buildinstall_method', 'arch', 'output_dir', 'jigdo_dir',
'iso_name', 'volid', 'graft_points', 'supported', 'os_tree',
"hfs_compat"])
CreateIsoOpts = namedtuple(
"CreateIsoOpts",
[
"buildinstall_method",
"arch",
"output_dir",
"jigdo_dir",
"iso_name",
"volid",
"graft_points",
"supported",
"os_tree",
"hfs_compat",
],
)
CreateIsoOpts.__new__.__defaults__ = (None,) * len(CreateIsoOpts._fields)
@@ -22,8 +33,8 @@ def quote(str):
"""Quote an argument for shell, but make sure $TEMPLATE variable will be
expanded.
"""
if str.startswith('$TEMPLATE'):
return '$TEMPLATE%s' % shlex_quote(str.replace('$TEMPLATE', '', 1))
if str.startswith("$TEMPLATE"):
return "$TEMPLATE%s" % shlex_quote(str.replace("$TEMPLATE", "", 1))
return shlex_quote(str)
@@ -32,38 +43,46 @@ def emit(f, cmd):
if isinstance(cmd, six.string_types):
print(cmd, file=f)
else:
print(' '.join([quote(x) for x in cmd]), file=f)
print(" ".join([quote(x) for x in cmd]), file=f)
FIND_TEMPLATE_SNIPPET = """
if ! TEMPLATE="$($(head -n1 $(which lorax) | cut -c3-) -c 'import pylorax; print(pylorax.find_templates())')"; then
TEMPLATE=/usr/share/lorax;
fi
""".replace('\n', '')
""".replace(
"\n", ""
)
def make_image(f, opts):
mkisofs_kwargs = {}
if opts.buildinstall_method:
if opts.buildinstall_method == 'lorax':
if opts.buildinstall_method == "lorax":
emit(f, FIND_TEMPLATE_SNIPPET)
mkisofs_kwargs["boot_args"] = iso.get_boot_options(
opts.arch,
os.path.join("$TEMPLATE", "config_files/ppc"),
hfs_compat=opts.hfs_compat,
)
elif opts.buildinstall_method == 'buildinstall':
elif opts.buildinstall_method == "buildinstall":
mkisofs_kwargs["boot_args"] = iso.get_boot_options(
opts.arch, "/usr/lib/anaconda-runtime/boot")
opts.arch, "/usr/lib/anaconda-runtime/boot"
)
# ppc(64) doesn't seem to support utf-8
if opts.arch in ("ppc", "ppc64", "ppc64le"):
mkisofs_kwargs["input_charset"] = None
cmd = iso.get_mkisofs_cmd(opts.iso_name, None, volid=opts.volid,
exclude=["./lost+found"],
graft_points=opts.graft_points, **mkisofs_kwargs)
cmd = iso.get_mkisofs_cmd(
opts.iso_name,
None,
volid=opts.volid,
exclude=["./lost+found"],
graft_points=opts.graft_points,
**mkisofs_kwargs
)
emit(f, cmd)
@@ -88,22 +107,20 @@ def make_manifest(f, opts):
def make_jigdo(f, opts):
jigdo = JigdoWrapper()
files = [
{
"path": opts.os_tree,
"label": None,
"uri": None,
}
]
cmd = jigdo.get_jigdo_cmd(os.path.join(opts.output_dir, opts.iso_name),
files, output_dir=opts.jigdo_dir,
no_servers=True, report="noprogress")
files = [{"path": opts.os_tree, "label": None, "uri": None}]
cmd = jigdo.get_jigdo_cmd(
os.path.join(opts.output_dir, opts.iso_name),
files,
output_dir=opts.jigdo_dir,
no_servers=True,
report="noprogress",
)
emit(f, cmd)
def write_script(opts, f):
if bool(opts.jigdo_dir) != bool(opts.os_tree):
raise RuntimeError('jigdo_dir must be used together with os_tree')
raise RuntimeError("jigdo_dir must be used together with os_tree")
emit(f, "#!/bin/bash")
emit(f, "set -ex")

@@ -42,8 +42,8 @@ class Substitutions(dict):
# DNF version of Substitutions detects host arch. We don't want that.
def __init__(self, arch):
super(Substitutions, self).__init__()
self['arch'] = arch
self['basearch'] = dnf_arch.basearch(arch)
self["arch"] = arch
self["basearch"] = dnf_arch.basearch(arch)
class DnfWrapper(dnf.Base):
@@ -52,8 +52,9 @@ class DnfWrapper(dnf.Base):
self.arch_wrapper = ArchWrapper(self.conf.substitutions["arch"])
self.comps_wrapper = CompsWrapper(self)
def add_repo(self, repoid, baseurl=None, enablegroups=True, lookaside=False,
**kwargs):
def add_repo(
self, repoid, baseurl=None, enablegroups=True, lookaside=False, **kwargs
):
self.repos.add_new_repo(
repoid,
self.conf,
@@ -83,7 +84,13 @@ class CompsWrapper(object):
result[i.id] = i
return result
def get_packages_from_group(self, group_id, include_default=True, include_optional=True, include_conditional=True):
def get_packages_from_group(
self,
group_id,
include_default=True,
include_optional=True,
include_conditional=True,
):
packages = []
conditional = []
@@ -117,9 +124,11 @@ class CompsWrapper(object):
continue
include_default = group_include in (1, 2)
include_optional = group_include in (2, )
include_optional = group_include in (2,)
include_conditional = True
pkgs, cond = self.get_packages_from_group(group_id, include_default, include_optional, include_conditional)
pkgs, cond = self.get_packages_from_group(
group_id, include_default, include_optional, include_conditional
)
packages.update(pkgs)
for i in cond:
if i not in conditional:
@@ -136,7 +145,11 @@ class CompsWrapper(object):
class ArchWrapper(object):
def __init__(self, arch):
self.base_arch = dnf_arch.basearch(arch)
self.all_arches = pungi.arch.get_valid_arches(self.base_arch, multilib=True, add_noarch=True)
self.native_arches = pungi.arch.get_valid_arches(self.base_arch, multilib=False, add_noarch=True)
self.all_arches = pungi.arch.get_valid_arches(
self.base_arch, multilib=True, add_noarch=True
)
self.native_arches = pungi.arch.get_valid_arches(
self.base_arch, multilib=False, add_noarch=True
)
self.multilib_arches = pungi.arch.get_valid_multilib_arches(self.base_arch)
self.source_arches = ["src", "nosrc"]

File diff suppressed because it is too large.

@@ -32,7 +32,7 @@ from pungi.util import DEBUG_PATTERNS
def get_source_name(pkg):
# Workaround for rhbz#1418298
return pkg.sourcerpm.rsplit('-', 2)[0]
return pkg.sourcerpm.rsplit("-", 2)[0]
class GatherOptions(pungi.common.OptionsBase):
@@ -79,21 +79,21 @@ class GatherOptions(pungi.common.OptionsBase):
def __str__(self):
lines = [
'fulltree=%s' % self.fulltree,
'fulltree_excludes=%d items' % len(self.fulltree_excludes),
'resolve_deps=%s' % self.resolve_deps,
'selfhosting=%s' % self.selfhosting,
'greedy_method=%s' % self.greedy_method,
'langpacks=%s' % self.langpacks,
'multilib_methods=%s' % self.multilib_methods,
'multilib_blacklist=%d items' % len(self.multilib_blacklist),
'multilib_whitelist=%d items' % len(self.multilib_whitelist),
'lookaside_repos=%s' % self.lookaside_repos,
'prepopulate=%d items' % len(self.prepopulate),
'exclude_source=%s' % self.exclude_source,
'exclude_debug=%s' % self.exclude_debug
"fulltree=%s" % self.fulltree,
"fulltree_excludes=%d items" % len(self.fulltree_excludes),
"resolve_deps=%s" % self.resolve_deps,
"selfhosting=%s" % self.selfhosting,
"greedy_method=%s" % self.greedy_method,
"langpacks=%s" % self.langpacks,
"multilib_methods=%s" % self.multilib_methods,
"multilib_blacklist=%d items" % len(self.multilib_blacklist),
"multilib_whitelist=%d items" % len(self.multilib_whitelist),
"lookaside_repos=%s" % self.lookaside_repos,
"prepopulate=%d items" % len(self.prepopulate),
"exclude_source=%s" % self.exclude_source,
"exclude_debug=%s" % self.exclude_debug,
]
return '[\n%s\n]' % '\n'.join(' ' + l for l in lines)
return "[\n%s\n]" % "\n".join(" " + l for l in lines)
class QueryCache(object):
@@ -142,7 +142,9 @@ class GatherBase(object):
# lookaside.
# source packages
self.q_source_packages = q.filter(arch=self.dnf.arch_wrapper.source_arches).apply()
self.q_source_packages = q.filter(
arch=self.dnf.arch_wrapper.source_arches
).apply()
q = q.difference(self.q_source_packages)
# filter arches
@@ -191,8 +193,12 @@ class Gather(GatherBase):
if not self.logger.handlers:
# default logging handler
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S"))
handler.setFormatter(
logging.Formatter(
"%(asctime)s [%(levelname)-8s] %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
)
)
handler.setLevel(logging.DEBUG)
self.logger.addHandler(handler)
@@ -202,22 +208,23 @@ class Gather(GatherBase):
self.dnf._sack,
gather_options.multilib_methods,
blacklist=self.opts.multilib_blacklist,
whitelist=self.opts.multilib_whitelist)
whitelist=self.opts.multilib_whitelist,
)
# already processed packages
self.finished_add_binary_package_deps = {} # {pkg: [deps]}
self.finished_add_debug_package_deps = {} # {pkg: [deps]}
self.finished_add_source_package_deps = {} # {pkg: [deps]}
self.finished_add_binary_package_deps = {} # {pkg: [deps]}
self.finished_add_debug_package_deps = {} # {pkg: [deps]}
self.finished_add_source_package_deps = {} # {pkg: [deps]}
self.finished_get_package_deps_reqs = {}
self.finished_add_conditional_packages = {} # {pkg: [pkgs]}
self.finished_add_source_packages = {} # {pkg: src-pkg|None}
self.sourcerpm_cache = {} # {src_nvra: src-pkg|None}
self.finished_add_debug_packages = {} # {pkg: [debug-pkgs]}
self.finished_add_fulltree_packages = {} # {pkg: [pkgs]}
self.finished_add_langpack_packages = {} # {pkg: [pkgs]}
self.finished_add_multilib_packages = {} # {pkg: pkg|None}
self.finished_add_conditional_packages = {} # {pkg: [pkgs]}
self.finished_add_source_packages = {} # {pkg: src-pkg|None}
self.sourcerpm_cache = {} # {src_nvra: src-pkg|None}
self.finished_add_debug_packages = {} # {pkg: [debug-pkgs]}
self.finished_add_fulltree_packages = {} # {pkg: [pkgs]}
self.finished_add_langpack_packages = {} # {pkg: [pkgs]}
self.finished_add_multilib_packages = {} # {pkg: pkg|None}
# result
self.result_binary_packages = set()
@@ -254,11 +261,17 @@ class Gather(GatherBase):
all_pkgs.append(pkg)
if not debuginfo:
native_pkgs = set(self.q_native_binary_packages.filter(pkg=all_pkgs).apply())
multilib_pkgs = set(self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply())
native_pkgs = set(
self.q_native_binary_packages.filter(pkg=all_pkgs).apply()
)
multilib_pkgs = set(
self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply()
)
else:
native_pkgs = set(self.q_native_debug_packages.filter(pkg=all_pkgs).apply())
multilib_pkgs = set(self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply())
multilib_pkgs = set(
self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply()
)
result = set()
@@ -307,7 +320,7 @@ class Gather(GatherBase):
version=pkg.version,
release=pkg.release,
arch=pkg.arch,
reponame=self.opts.lookaside_repos
reponame=self.opts.lookaside_repos,
)
return pkg in pkgs
@@ -328,7 +341,7 @@ class Gather(GatherBase):
# lookaside
if self.is_from_lookaside(i):
self._set_flag(i, PkgFlag.lookaside)
if i.sourcerpm.rsplit('-', 2)[0] in self.opts.fulltree_excludes:
if i.sourcerpm.rsplit("-", 2)[0] in self.opts.fulltree_excludes:
self._set_flag(i, PkgFlag.fulltree_exclude)
def _get_package_deps(self, pkg, debuginfo=False):
@@ -350,8 +363,8 @@
# empty.
requires = (
pkg.requires
+ getattr(pkg, 'requires_pre', [])
+ getattr(pkg, 'requires_post', [])
+ getattr(pkg, "requires_pre", [])
+ getattr(pkg, "requires_post", [])
)
q = queue.filter(provides=requires).apply()
@@ -378,7 +391,9 @@
"""Given an name of a queue (stored as attribute in `self`), exclude
all given packages and keep only the latest per package name and arch.
"""
setattr(self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply())
setattr(
self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply()
)
@Profiler("Gather._apply_excludes()")
def _apply_excludes(self, excludes):
@@ -395,20 +410,22 @@
with Profiler("Gather._apply_excludes():exclude"):
if pattern.endswith(".+"):
pkgs = self.q_multilib_binary_packages.filter(
name__glob=pattern[:-2], arch__neq='noarch',
reponame__neq=self.opts.lookaside_repos)
name__glob=pattern[:-2],
arch__neq="noarch",
reponame__neq=self.opts.lookaside_repos,
)
elif pattern.endswith(".src"):
pkgs = self.q_source_packages.filter(
name__glob=pattern[:-4],
reponame__neq=self.opts.lookaside_repos)
name__glob=pattern[:-4], reponame__neq=self.opts.lookaside_repos
)
elif pungi.util.pkg_is_debug(pattern):
pkgs = self.q_debug_packages.filter(
name__glob=pattern,
reponame__neq=self.opts.lookaside_repos)
name__glob=pattern, reponame__neq=self.opts.lookaside_repos
)
else:
pkgs = self.q_binary_packages.filter(
name__glob=pattern,
reponame__neq=self.opts.lookaside_repos)
name__glob=pattern, reponame__neq=self.opts.lookaside_repos
)
exclude.update(pkgs)
self.logger.debug("EXCLUDED by %s: %s", pattern, [str(p) for p in pkgs])
@@ -417,15 +434,22 @@
for pattern in self.opts.multilib_blacklist:
with Profiler("Gather._apply_excludes():exclude-multilib-blacklist"):
# TODO: does whitelist affect this in any way?
pkgs = self.q_multilib_binary_packages.filter(name__glob=pattern, arch__neq='noarch')
pkgs = self.q_multilib_binary_packages.filter(
name__glob=pattern, arch__neq="noarch"
)
exclude.update(pkgs)
self.logger.debug("EXCLUDED by %s: %s", pattern, [str(p) for p in pkgs])
self.dnf._sack.add_excludes(pkgs)
all_queues = ['q_binary_packages', 'q_native_binary_packages',
'q_multilib_binary_packages', 'q_noarch_binary_packages',
'q_source_packages', 'q_native_debug_packages',
'q_multilib_debug_packages']
all_queues = [
"q_binary_packages",
"q_native_binary_packages",
"q_multilib_binary_packages",
"q_noarch_binary_packages",
"q_source_packages",
"q_native_debug_packages",
"q_multilib_debug_packages",
]
with Profiler("Gather._apply_excludes():exclude-queries"):
for queue in all_queues:
@@ -449,10 +473,14 @@
for pattern in includes:
with Profiler("Gather.add_initial_packages():include"):
if pattern == "system-release" and self.opts.greedy_method == "all":
pkgs = self.q_binary_packages.filter(provides="system-release").apply()
pkgs = self.q_binary_packages.filter(
provides="system-release"
).apply()
else:
if pattern.endswith(".+"):
pkgs = self.q_multilib_binary_packages.filter(name__glob=pattern[:-2]).apply()
pkgs = self.q_multilib_binary_packages.filter(
name__glob=pattern[:-2]
).apply()
else:
pkgs = self.q_binary_packages.filter(name__glob=pattern).apply()
@@ -482,19 +510,37 @@
# Must be executed *after* add_initial_packages() to exclude packages properly.
# source
self.source_pkgs_cache = QueryCache(self.q_source_packages, "name", "version", "release")
self.source_pkgs_cache = QueryCache(
self.q_source_packages, "name", "version", "release"
)
# debug
self.native_debug_packages_cache = QueryCache(self.q_native_debug_packages, "sourcerpm")
self.multilib_debug_packages_cache = QueryCache(self.q_multilib_debug_packages, "sourcerpm")
self.native_debug_packages_cache = QueryCache(
self.q_native_debug_packages, "sourcerpm"
)
self.multilib_debug_packages_cache = QueryCache(
self.q_multilib_debug_packages, "sourcerpm"
)
# packages by sourcerpm
self.q_native_pkgs_by_sourcerpm_cache = QueryCache(self.q_native_binary_packages, "sourcerpm", arch__neq="noarch")
self.q_multilib_pkgs_by_sourcerpm_cache = QueryCache(self.q_multilib_binary_packages, "sourcerpm", arch__neq="noarch")
self.q_noarch_pkgs_by_sourcerpm_cache = QueryCache(self.q_native_binary_packages, "sourcerpm", arch="noarch")
self.q_native_pkgs_by_sourcerpm_cache = QueryCache(
self.q_native_binary_packages, "sourcerpm", arch__neq="noarch"
)
self.q_multilib_pkgs_by_sourcerpm_cache = QueryCache(
self.q_multilib_binary_packages, "sourcerpm", arch__neq="noarch"
)
self.q_noarch_pkgs_by_sourcerpm_cache = QueryCache(
self.q_native_binary_packages, "sourcerpm", arch="noarch"
)
# multilib
self.q_multilib_binary_packages_cache = QueryCache(self.q_multilib_binary_packages, "name", "version", "release", arch__neq="noarch")
self.q_multilib_binary_packages_cache = QueryCache(
self.q_multilib_binary_packages,
"name",
"version",
"release",
arch__neq="noarch",
)
# prepopulate
self.prepopulate_cache = QueryCache(self.q_binary_packages, "name", "arch")
@@ -531,7 +577,9 @@
deps = self._get_package_deps(pkg)
for i, req in deps:
if i not in self.result_binary_packages:
self._add_packages([i], pulled_by=pkg, req=req, reason='binary-dep')
self._add_packages(
[i], pulled_by=pkg, req=req, reason="binary-dep"
)
added.add(i)
self.finished_add_binary_package_deps[pkg] = deps
@@ -593,7 +641,7 @@
for i in deps:
if i not in self.result_binary_packages:
self._add_packages([i], pulled_by=pkg, reason='cond-dep')
self._add_packages([i], pulled_by=pkg, reason="cond-dep")
self._set_flag(pkg, PkgFlag.conditional)