Compare commits
No commits in common. "master" and "pungi-4.3.7-7.alma" have entirely different histories.
master ... pungi-4.3.7-7.alma
MANIFEST.in

@@ -2,7 +2,6 @@ include AUTHORS
 include COPYING
 include GPL
 include pungi.spec
 include setup.cfg
 include tox.ini
 include share/*
 include share/multilib/*
@@ -1,2 +0,0 @@
-# Clean up pungi cache
-d /var/cache/pungi/createrepo_c/ - - - 30d
doc/conf.py
@@ -18,12 +18,12 @@ import os
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-# sys.path.insert(0, os.path.abspath('.'))
+#sys.path.insert(0, os.path.abspath('.'))

 # -- General configuration ------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-# needs_sphinx = '1.0'
+#needs_sphinx = '1.0'

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -31,201 +31,207 @@ import os
 extensions = []

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
+templates_path = ['_templates']

 # The suffix of source filenames.
-source_suffix = ".rst"
+source_suffix = '.rst'

 # The encoding of source files.
-# source_encoding = 'utf-8-sig'
+#source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = "index"
+master_doc = 'index'

 # General information about the project.
-project = "Pungi"
-copyright = "2016, Red Hat, Inc."
+project = u'Pungi'
+copyright = u'2016, Red Hat, Inc.'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = "4.5"
+version = '4.3'
 # The full version, including alpha/beta/rc tags.
-release = "4.5.0"
+release = '4.3.7'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-# language = None
+#language = None

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-# today = ''
+#today = ''
 # Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
+#today_fmt = '%B %d, %Y'

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = ['_build']

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-# default_role = None
+#default_role = None

 # If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
+#add_function_parentheses = True

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-# add_module_names = True
+#add_module_names = True

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-# show_authors = False
+#show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
+pygments_style = 'sphinx'

 # A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
+#modindex_common_prefix = []

 # If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
+#keep_warnings = False


 # -- Options for HTML output ----------------------------------------------

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = "default"
+html_theme = 'default'

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-# html_theme_options = {}
+#html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
+#html_theme_path = []

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-# html_title = None
+#html_title = None

 # A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
+#html_short_title = None

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-# html_logo = None
+#html_logo = None

 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-# html_favicon = None
+#html_favicon = None

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
+html_static_path = ['_static']

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-# html_extra_path = []
+#html_extra_path = []

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
+#html_last_updated_fmt = '%b %d, %Y'

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-# html_use_smartypants = True
+#html_use_smartypants = True

 # Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
+#html_sidebars = {}

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-# html_additional_pages = {}
+#html_additional_pages = {}

 # If false, no module index is generated.
-# html_domain_indices = True
+#html_domain_indices = True

 # If false, no index is generated.
-# html_use_index = True
+#html_use_index = True

 # If true, the index is split into individual pages for each letter.
-# html_split_index = False
+#html_split_index = False

 # If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
+#html_show_sourcelink = True

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
+#html_show_sphinx = True

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
+#html_show_copyright = True

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-# html_use_opensearch = ''
+#html_use_opensearch = ''

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
+#html_file_suffix = None

 # Output file base name for HTML help builder.
-htmlhelp_basename = "Pungidoc"
+htmlhelp_basename = 'Pungidoc'


 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    #'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
+    # The paper size ('letterpaper' or 'a4paper').
+    #'papersize': 'letterpaper',
+
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
+
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    ("index", "Pungi.tex", "Pungi Documentation", "Daniel Mach", "manual"),
+    ('index', 'Pungi.tex', u'Pungi Documentation',
+     u'Daniel Mach', 'manual'),
 ]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-# latex_logo = None
+#latex_logo = None

 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-# latex_use_parts = False
+#latex_use_parts = False

 # If true, show page references after internal links.
-# latex_show_pagerefs = False
+#latex_show_pagerefs = False

 # If true, show URL addresses after external links.
-# latex_show_urls = False
+#latex_show_urls = False

 # Documents to append as an appendix to all manuals.
-# latex_appendices = []
+#latex_appendices = []

 # If false, no module index is generated.
-# latex_domain_indices = True
+#latex_domain_indices = True


 # -- Options for manual page output ---------------------------------------

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [("index", "pungi", "Pungi Documentation", ["Daniel Mach"], 1)]
+man_pages = [
+    ('index', 'pungi', u'Pungi Documentation',
+     [u'Daniel Mach'], 1)
+]

 # If true, show URL addresses after external links.
-# man_show_urls = False
+#man_show_urls = False


 # -- Options for Texinfo output -------------------------------------------
@@ -234,25 +240,19 @@ man_pages = [("index", "pungi", "Pungi Documentation", ["Daniel Mach"], 1)]
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (
-        "index",
-        "Pungi",
-        "Pungi Documentation",
-        "Daniel Mach",
-        "Pungi",
-        "One line description of project.",
-        "Miscellaneous",
-    ),
+    ('index', 'Pungi', u'Pungi Documentation',
+     u'Daniel Mach', 'Pungi', 'One line description of project.',
+     'Miscellaneous'),
 ]

 # Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
+#texinfo_appendices = []

 # If false, no module index is generated.
-# texinfo_domain_indices = True
+#texinfo_domain_indices = True

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'
+#texinfo_show_urls = 'footnote'

 # If true, do not generate a @detailmenu in the "Top" node's menu.
-# texinfo_no_detailmenu = False
+#texinfo_no_detailmenu = False
doc/configuration.rst

@@ -194,17 +194,6 @@ Options
     Tracking Service Kerberos authentication. If not defined, the default
     Kerberos principal is used.

-**cts_oidc_token_url**
-    (*str*) -- URL to the OIDC token endpoint.
-    For example ``https://oidc.example.com/openid-connect/token``.
-    This option can be overridden by the environment variable ``CTS_OIDC_TOKEN_URL``.
-
-**cts_oidc_client_id**
-    (*str*) -- OIDC client ID.
-    This option can be overridden by the environment variable ``CTS_OIDC_CLIENT_ID``.
-    Note that environment variable ``CTS_OIDC_CLIENT_SECRET`` must be configured with
-    corresponding client secret to authenticate to CTS via OIDC.
-
 **compose_type**
     (*str*) -- Allows to set default compose type. Type set via a command-line
     option overwrites this.
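For illustration, the OIDC options above combine with environment variables
roughly as in this sketch of a pungi configuration fragment (URLs and the
client name are invented, not from any real deployment)::

    # pungi configuration fragment (values illustrative)
    cts_url = "https://cts.example.com"
    cts_oidc_token_url = "https://oidc.example.com/openid-connect/token"
    cts_oidc_client_id = "pungi-compose"

    # The client secret never goes into the config file; it is read from the
    # environment of the compose process:
    #   CTS_OIDC_CLIENT_SECRET=...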
@@ -592,16 +581,6 @@ Options
     with everything. Set this option to ``False`` to ignore ``noarch`` in
     ``ExclusiveArch`` and always consider only binary architectures.

-**pkgset_inherit_exclusive_arch_to_noarch** = True
-    (*bool*) -- When set to ``True``, the value of ``ExclusiveArch`` or
-    ``ExcludeArch`` will be copied from source rpm to all its noarch packages.
-    That will then limit which architectures the noarch packages can be
-    included in.
-
-    By setting this option to ``False`` this step is skipped, and noarch
-    packages will by default land in all architectures. They can still be
-    excluded by listing them in a relevant section of ``filter_packages``.
-
 **pkgset_allow_reuse** = True
     (*bool*) -- When set to ``True``, *Pungi* will try to reuse pkgset data
     from the old composes specified by ``--old-composes``. When enabled, this
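A sketch of the interplay described in the removed option (variant and package
names are invented; the ``filter_packages`` shape follows the usual
``(variant regex, {arch: [packages]})`` convention)::

    pkgset_inherit_exclusive_arch_to_noarch = False

    # noarch packages now land in all architectures; drop the unwanted ones
    # per variant instead:
    filter_packages = [
        ("^Server$", {"*": ["example-noarch-docs"]}),
    ]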
@@ -941,10 +920,6 @@ Options
     comps file can not be found in the package set. When disabled (the
     default), such cases are still reported as warnings in the log.

-    With ``dnf`` gather backend, this option will abort the compose on any
-    missing package no matter if it's listed in comps, ``additional_packages``
-    or prepopulate file.
-
 **gather_source_mapping**
     (*str*) -- JSON mapping with initial packages for the compose. The value
     should be a path to JSON file with following mapping: ``{variant: {arch:
@@ -1804,8 +1779,6 @@ repository with a new commit.
 * ``tag_ref`` -- (*bool*, default ``True``) If set to ``False``, a git
   reference will not be created.
 * ``ostree_ref`` -- (*str*) To override value ``ref`` from ``treefile``.
-* ``runroot_packages`` -- (*list*) A list of additional package names to be
-  installed in the runroot environment in Koji.

 Example config
 --------------
doc/index.rst

@@ -19,7 +19,7 @@ Contents:
    scm_support
    messaging
    gathering
-   koji
    comps
    contributing
    testing
+   multi_compose
doc/koji.rst
@@ -1,105 +0,0 @@
-======================
-Getting data from koji
-======================
-
-When Pungi is configured to get packages from a Koji tag, it somehow needs to
-access the actual RPM files.
-
-Historically, this required the storage used by Koji to be directly available
-on the host where Pungi was running. This was usually achieved by using NFS for
-the Koji volume, and mounting it on the compose host.
-
-The compose could be created directly on the same volume. In such case the
-packages would be hardlinked, significantly reducing space consumption.
-
-The compose could also be created on a different storage, in which case the
-packages would either need to be copied over or symlinked. Using symlinks
-requires that anything that accesses the compose (e.g. a download server) would
-also need to mount the Koji volume in the same location.
-
-There is also a risk with symlinks that the package in Koji can change (due to
-being resigned for example), which would invalidate composes linking to it.
-
-
-Using Koji without direct mount
-===============================
-
-It is now possible to run a compose from a Koji tag without direct access to
-Koji storage.
-
-Pungi can download the packages over HTTP protocol, store them in a local
-cache, and consume them from there.
-
-The local cache has a similar structure to what is on the Koji volume.
-
-When Pungi needs some package, it has a path on Koji volume. It will replace
-the ``topdir`` with the cache location. If such file exists, it will be used.
-If it doesn't exist, it will be downloaded from Koji (by replacing the
-``topdir`` with ``topurl``).
-
-::
-
-    Koji path    /mnt/koji/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
-    Koji URL     https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
-    Local path   /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
-
-The packages can be hardlinked from this cache directory.
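The lookup described above is a plain prefix swap; a minimal sketch of the
idea (the ``topdir``/``topurl``/cache values are illustrative defaults, not
read from any configuration)::

    import os

    TOPDIR = "/mnt/koji"
    TOPURL = "https://kojipkgs.fedoraproject.org"
    CACHE = "/mnt/compose/cache"

    def local_path(koji_path):
        # /mnt/koji/... -> /mnt/compose/cache/...
        return koji_path.replace(TOPDIR, CACHE, 1)

    def download_url(koji_path):
        # /mnt/koji/... -> https://kojipkgs.fedoraproject.org/...
        return koji_path.replace(TOPDIR, TOPURL, 1)

    path = "/mnt/koji/packages/foo/1/1.fc38/noarch/foo-1-1.fc38.noarch.rpm"
    if not os.path.exists(local_path(path)):
        print("would fetch", download_url(path))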
-
-
-Cleanup
--------
-
-While the approach above allows each RPM to be downloaded only once, it will
-eventually result in the Koji volume being mirrored locally. Most of the
-packages will however no longer be needed.
-
-There is a script ``pungi-cache-cleanup`` that can help with that. It can find
-and remove files from the cache that are no longer needed.
-
-A file is no longer needed if it has a single link (meaning it is only in the
-cache, not in any compose), and it has mtime older than a given threshold.
-
-It doesn't make sense to delete files that are hardlinked in an existing
-compose as it would not save any space anyway.
-
-The mtime check is meant to preserve files that are downloaded but not actually
-used in a compose, like a subpackage that is not included in any variant. Every
-time its existence in the local cache is checked, the mtime is updated.
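The "single link, old mtime" rule maps directly onto ``os.stat``; a rough
sketch of the criterion (the cache root and threshold are assumptions, not
taken from the actual script)::

    import os
    import time

    CACHE_ROOT = "/mnt/compose/cache"   # assumed location
    THRESHOLD = 30 * 24 * 3600          # assumed: 30 days

    now = time.time()
    for dirpath, _dirs, files in os.walk(CACHE_ROOT):
        for name in files:
            path = os.path.join(dirpath, name)
            st = os.stat(path)
            # Only linked from the cache itself, and not used recently.
            if st.st_nlink == 1 and now - st.st_mtime > THRESHOLD:
                os.unlink(path)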
-
-
-Race conditions?
-----------------
-
-It should be safe to have multiple compose hosts share the same storage volume
-for generated composes and local cache.
-
-If a cache file is accessed and it exists, there's no risk of race condition.
-
-If two composes need the same file at the same time and it is not present yet,
-one of them will take a lock on it and start downloading. The other will wait
-until the download is finished.
-
-The lock is only valid for a set amount of time (5 minutes) to avoid issues
-where the downloading process is killed in a way that blocks it from releasing
-the lock.
-
-If the file is large and the network slow, the limit may not be enough to
-finish downloading. In that case the second process will steal the lock while
-the first process is still downloading. This will result in the same file being
-downloaded twice.
-
-When the first process finishes the download, it will put the file into the
-local cache location. When the second process finishes, it will atomically
-replace it, but since the content is identical, nothing changes.
-
-If the first compose already managed to hardlink the file before it gets
-replaced, there will be two copies of the file present locally.
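The locking behaviour described above matches what the ``python3-flufl-lock``
dependency in the spec file provides; a hedged sketch of the pattern (the lock
file name and the download step are made up, this is not the actual
implementation)::

    from datetime import timedelta
    from flufl.lock import Lock

    # The lifetime bounds how long a dead process can block the others.
    with Lock("/mnt/compose/cache/foo.rpm.lock", lifetime=timedelta(minutes=5)):
        # Download to a temporary name, then os.rename() into place so that
        # readers never observe a partially written RPM.
        pass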
-
-
-Integrity checking
-------------------
-
-There is minimal integrity checking. RPM packages belonging to real builds will
-be checked to match the checksum provided by Koji hub.
-
-There is no checking for scratch builds or any images.
doc/multi_compose.rst

@@ -0,0 +1,107 @@
+.. _multi_compose:
+
+Managing compose from multiple parts
+====================================
+
+There may be cases where it makes sense to split a big compose into separate
+parts, but create a compose output that links all output into one familiar
+structure.
+
+The `pungi-orchestrate` tool allows that.
+
+It works with an INI-style configuration file. The ``[general]`` section
+contains information about identity of the main compose. Other sections define
+individual parts.
+
+The parts are scheduled to run in parallel, with the minimal amount of
+serialization. The final compose directory will contain hard-links to the
+files.
+
+
+General settings
+----------------
+
+**target**
+   Path to directory where the final compose should be created.
+**compose_type**
+   Type of compose to make.
+**release_name**
+   Name of the product for the final compose.
+**release_short**
+   Short name of the product for the final compose.
+**release_version**
+   Version of the product for the final compose.
+**release_type**
+   Type of the product for the final compose.
+**extra_args**
+   Additional arguments that will be passed to the child Pungi processes.
+**koji_profile**
+   If specified, a current event will be retrieved from the Koji instance and
+   used for all parts.
+
+**kerberos**
+   If set to yes, a kerberos ticket will be automatically created at the
+   start. Set keytab and principal as well.
+**kerberos_keytab**
+   Path to keytab file used to create the kerberos ticket.
+**kerberos_principal**
+   Kerberos principal for the ticket.
+
+**pre_compose_script**
+   Commands to execute before first part is started. Can contain multiple
+   commands on separate lines.
+**post_compose_script**
+   Commands to execute after the last part finishes and final status is
+   updated. Can contain multiple commands on separate lines. ::
+
+       post_compose_script =
+           compose-latest-symlink $COMPOSE_PATH
+           custom-post-compose-script.sh
+
+   Multiple environment variables are defined for the scripts:
+
+   * ``COMPOSE_PATH``
+   * ``COMPOSE_ID``
+   * ``COMPOSE_DATE``
+   * ``COMPOSE_TYPE``
+   * ``COMPOSE_RESPIN``
+   * ``COMPOSE_LABEL``
+   * ``RELEASE_ID``
+   * ``RELEASE_NAME``
+   * ``RELEASE_SHORT``
+   * ``RELEASE_VERSION``
+   * ``RELEASE_TYPE``
+   * ``RELEASE_IS_LAYERED`` -- ``YES`` for layered products, empty otherwise
+   * ``BASE_PRODUCT_NAME`` -- only set for layered products
+   * ``BASE_PRODUCT_SHORT`` -- only set for layered products
+   * ``BASE_PRODUCT_VERSION`` -- only set for layered products
+   * ``BASE_PRODUCT_TYPE`` -- only set for layered products
+
+**notification_script**
+   Executable name (or path to a script) that will be used to send a message
+   once the compose is finished. In order for a valid URL to be included in the
+   message, at least one part must configure path translation that would apply
+   to location of main compose.
+
+   Only two messages will be sent, one for start and one for finish (either
+   successful or not).
+
+
+Partial compose settings
+------------------------
+
+Each part should have a separate section in the config file.
+
+It can specify these options:
+
+**config**
+   Path to configuration file that describes this part. If relative, it is
+   resolved relative to the file with parts configuration.
+**just_phase**, **skip_phase**
+   Customize which phases should run for this part.
+**depends_on**
+   A comma separated list of other parts that must be finished before this
+   part starts.
+**failable**
+   A boolean toggle to mark a part as failable. A failure in such part will
+   mark the final compose as incomplete, but still successful.
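Putting the two sections together, a minimal parts file might look like this
(all section names, paths and values are invented for illustration)::

    [general]
    target = /mnt/compose/final
    release_name = MyProduct
    release_short = MP
    release_version = 1.0

    [base]
    config = base.conf

    [extras]
    config = extras.conf
    depends_on = base
    failable = yes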
doc/scm_support.rst

@@ -41,14 +41,6 @@ which can contain following keys.
 * ``command`` -- defines a shell command to run after Git clone to generate the
   needed file (for example to run ``make``). Only supported in Git backend.

-* ``options`` -- a dictionary of additional configuration options. These are
-  specific to different backends.
-
-  Currently supported values for Git:
-
-  * ``credential_helper`` -- path to a credential helper used to supply
-    username/password for remotes that require authentication.
-

 Koji examples
 -------------
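For reference, the removed ``options`` key was used inside an scm dict along
these lines (the repository URL and helper path are invented)::

    {
        "scm": "git",
        "repo": "https://git.example.com/private/comps.git",
        "file": "comps.xml",
        "options": {"credential_helper": "/usr/local/bin/my-credential-helper"},
    }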
pungi.spec
@@ -1,8 +1,8 @@
 %{?python_enable_dependency_generator}

 Name:           pungi
-Version:        4.5.0
-Release:        3%{?dist}.alma
+Version:        4.3.7
+Release:        7%{?dist}.alma
 Summary:        Distribution compose tool

 License:        GPL-2.0-only
@@ -11,14 +11,15 @@ Source0: %{name}-%{version}.tar.bz2

 BuildRequires:  make
 BuildRequires:  python3-pytest
-# replaced by unittest.mock
-# BuildRequires: python3-mock
-BuildRequires:  python3-pyfakefs
-BuildRequires:  python3-ddt
 BuildRequires:  python3-devel
 BuildRequires:  python3-setuptools
 BuildRequires:  python3-productmd >= 1.33
 BuildRequires:  python3-kobo-rpmlib >= 0.18.0
 BuildRequires:  createrepo_c >= 0.20.1
 BuildRequires:  python3-lxml
+BuildRequires:  python3-ddt
 BuildRequires:  python3-kickstart
 BuildRequires:  python3-rpm
 BuildRequires:  python3-dnf
@@ -36,21 +37,23 @@ BuildRequires: python3-gobject
 BuildRequires:  python3-createrepo_c >= 0.20.1
 BuildRequires:  python3-dogpile-cache
 BuildRequires:  python3-parameterized
-BuildRequires:  python3-flufl-lock
-BuildRequires:  python3-ddt
-BuildRequires:  python3-distro
 BuildRequires:  python3-gobject-base
-BuildRequires:  python3-pgpy
+BuildRequires:  python3-pyfakefs
+BuildRequires:  python3-distro
+%if %{rhel} == 8
+BuildRequires:  python3-dataclasses
+%endif
+BuildRequires:  python3-pgpy

 #deps for doc building
 BuildRequires:  python3-sphinx
@@ -65,17 +68,11 @@ Requires: python3-libmodulemd >= 2.8.0
 Requires:       python3-gobject
 Requires:       python3-createrepo_c >= 0.20.1
 Requires:       python3-PyYAML
-Requires:       python3-productmd >= 1.28
-Requires:       python3-flufl-lock
-Requires:       python3-productmd >= 1.33
-Requires:       lorax
-Requires:       python3-distro
+Requires:       python3-productmd >= 1.28
+Requires:       python3-gobject-base
+Requires:       lorax
+Requires:       python3-pgpy
+Requires:       python3-requests
+%if %{rhel} == 8
+Requires:       python3-dataclasses
+%endif
+Requires:       python3-distro

 # This package is not available on i686, hence we cannot require it
 # See https://bugzilla.redhat.com/show_bug.cgi?id=1743421
@@ -91,7 +88,6 @@ A tool to create anaconda based installation trees/isos of a set of rpms.
 %package utils
 Summary:        Utilities for working with finished composes
 Requires:       pungi = %{version}-%{release}
-Requires:       python3-fedora-messaging

 %description utils
 These utilities work with finished composes produced by Pungi. They can be used
@@ -100,8 +96,8 @@ notification to Fedora Message Bus.

 %package -n python3-%{name}
 Summary:        Python 3 libraries for pungi
-Requires:       fus
 Requires:       python3-attrs
+Requires:       fus

 %description -n python3-%{name}
 Python library with code for Pungi. This is not a public library and there are
@@ -144,9 +140,7 @@ rm %{buildroot}%{_bindir}/pungi
 %{_bindir}/%{name}-make-ostree
 %{_mandir}/man1/pungi.1.gz
 %{_datadir}/pungi
-%{_localstatedir}/cache/pungi
-%dir %attr(1777, root, root) %{_localstatedir}/cache/pungi/createrepo_c
-%{_tmpfilesdir}/pungi-clean-cache.conf
+/var/cache/pungi

 %files -n python3-%{name}
 %{python3_sitelib}/%{name}
@@ -157,72 +151,14 @@ rm %{buildroot}%{_bindir}/pungi
 %{_bindir}/%{name}-create-unified-isos
 %{_bindir}/%{name}-config-dump
 %{_bindir}/%{name}-config-validate
 %{_bindir}/%{name}-fedmsg-notification
 %{_bindir}/%{name}-notification-report-progress
+%{_bindir}/%{name}-orchestrate
 %{_bindir}/%{name}-patch-iso
 %{_bindir}/%{name}-compare-depsolving
 %{_bindir}/%{name}-wait-for-signed-ostree-handler
-%{_bindir}/%{name}-cache-cleanup


 %changelog
-* Mon Nov 21 2023 Stepan Oksanichenko <soksanichenko@almalinux.org> - 4.5.0-3
-- Method `get_remote_file_content` is object's method now
-
-* Wed Nov 15 2023 Stepan Oksanichenko <soksanichenko@almalinux.org> - 4.5.0-2
-- Return empty list if a repo doesn't contain any module
-
-* Thu Aug 31 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.5.0-1
-- kojiwrapper: Stop being smart about local access (lsedlar)
-- Fix unittest errors (ounsal)
-- Add integrity checking for builds (lsedlar)
-- Add script for cleaning up the cache (lsedlar)
-- Add ability to download images (lsedlar)
-- Add support for not having koji volume mounted locally (lsedlar)
-- Remove repository cloning multiple times (abisoi)
-- Support require_all_comps_packages on DNF backend (lsedlar)
-- Fix new warnings from flake8 (lsedlar)
-
 * Tue Jul 25 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-8
 - Option `excluded-packages` for script `pungi-gather-rpms`

-* Tue Jul 25 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.1-1
-- ostree: Add configuration for custom runroot packages (lsedlar)
-- pkgset: Emit better error for missing modulemd file (lsedlar)
-- Add support for git-credential-helper (lsedlar)
-- Support OIDC Client Credentials authentication to CTS (hlin)
-
-* Fri Jul 21 2023 Fedora Release Engineering <releng@fedoraproject.org> - 4.4.0-4
-- Rebuilt for https://fedoraproject.org/wiki/Fedora_39_Mass_Rebuild
-
-* Wed Jul 19 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.0-3
-- Backport ostree runroot package additions
-
-* Wed Jul 19 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.0-2
-- Backport ostree runroot package additions
-
-* Mon Jun 19 2023 Python Maint <python-maint@redhat.com> - 4.4.0-2
-- Rebuilt for Python 3.12
-
-* Wed Jun 07 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.0-1
-- gather-dnf: Run latest() later (lsedlar)
-- iso: Support joliet long names (lsedlar)
-- Drop pungi-orchestrator code (lsedlar)
-- isos: Ensure proper file ownership and permissions (lsedlar)
-- gather: Always get latest packages (lsedlar)
-- Add back compatibility with jsonschema <3.0.0 (lsedlar)
-- Remove useless debug message (lsedlar)
-- Remove fedmsg from requirements (lsedlar)
-- gather: Support dotarch in DNF backend (lsedlar)
-- Fix compatibility with createrepo_c 0.21.1 (lsedlar)
-- comps: Apply arch filtering to environment/optionlist (lsedlar)
-- Add config file for cleaning up cache files (hlin)
-
-* Wed May 17 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.8-3
-- Rebuild without fedmsg dependency
-
-* Wed May 03 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.8-1
-- Set priority for Fedora messages
-
 * Thu Apr 13 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-7
 - gather-module can find modules through symlinks
@@ -238,26 +174,6 @@ rm %{buildroot}%{_bindir}/pungi
 - [Generator of packages.json] Replace using CLI by config.yaml
 - [Gather RPMs] os.path is replaced by Path

-* Thu Mar 30 2023 Haibo Lin <hlin@redhat.com> - 4.3.8-1
-- createiso: Update possibly changed file on DVD (lsedlar)
-- pkgset: Stop reuse if configuration changed (lsedlar)
-- Allow disabling inheriting ExcludeArch to noarch packages (lsedlar)
-- pkgset: Support extra builds with no tags (lsedlar)
-- buildinstall: Avoid pointlessly tweaking the boot images (lsedlar)
-- Prevent to reuse if unsigned packages are allowed (hlin)
-- Pass parent id/respin id to CTS (lsedlar)
-- Exclude existing files in boot.iso (hlin)
-- image-build/osbuild: Pull ISOs into the compose (lsedlar)
-- Retry 401 error from CTS (lsedlar)
-- gather: Better detection of debuginfo in lookaside (lsedlar)
-- Log versions of all installed packages (hlin)
-- Use authentication for all CTS calls (lsedlar)
-- Fix black complaints (lsedlar)
-- Add vhd.gz extension to compressed VHD images (lsedlar)
-- Add vhd-compressed image type (lsedlar)
-- Update to work with latest mock (lsedlar)
-- Default bztar format for sdist command (onosek)
-
 * Fri Mar 17 2023 Stepan Oksanichenko <soksanichenko@cloudlinux.com> - 4.3.7-3
 - ALBS-987: Generate i686 repositories with pungi on building new distr. version automatically
 - KojiMock extracts all modules which are suitable for the variant's arches
pungi/checks.py

@@ -39,6 +39,7 @@ from __future__ import print_function
 import multiprocessing
 import os.path
 import platform
+import distro
 import re

 import jsonschema
@@ -227,19 +228,9 @@ def validate(config, offline=False, schema=None):
     DefaultValidator = _extend_with_default_and_alias(
         jsonschema.Draft4Validator, offline=offline
     )
-    if hasattr(jsonschema.Draft4Validator, "TYPE_CHECKER"):
-        # jsonschema >= 3.0 has new interface for checking types
-        validator = DefaultValidator(schema)
-    else:
-        validator = DefaultValidator(
-            schema,
-            {
-                "array": (tuple, list),
-                "regex": six.string_types,
-                "url": six.string_types,
-            },
-        )
+    validator = DefaultValidator(
+        schema,
+    )
     errors = []
     warnings = []
     for error in validator.iter_errors(config):
@@ -387,7 +378,6 @@ def _extend_with_default_and_alias(validator_class, offline=False):
             instance[property]["branch"] = resolver(
                 instance[property]["repo"],
                 instance[property].get("branch") or "HEAD",
-                instance[property].get("options"),
             )

         for error in _hook_errors(properties, instance, schema):
@@ -455,31 +445,49 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 context=all_errors,
             )

-    kwargs = {}
-    if hasattr(validator_class, "TYPE_CHECKER"):
-        # jsonschema >= 3
-        def is_array(checker, instance):
-            return isinstance(instance, (tuple, list))
+    def is_array(checker, instance):
+        return isinstance(instance, (tuple, list))

-        def is_string_type(checker, instance):
-            return isinstance(instance, six.string_types)
+    def is_string_type(checker, instance):
+        return isinstance(instance, six.string_types)

-        kwargs["type_checker"] = validator_class.TYPE_CHECKER.redefine_many(
+    # RHEL9 has newer version of package jsonschema
+    # which has another way of working with validators
+    if float(distro.linux_distribution()[1]) < 9:
+        validator = jsonschema.validators.extend(
+            validator_class,
+            {
+                "properties": properties_validator,
+                "deprecated": error_on_deprecated,
+                "type": validate_regex_type,
+                "required": _validate_required,
+                "additionalProperties": _validate_additional_properties,
+                "anyOf": _validate_any_of,
+            },
+        )
+        validator.DEFAULT_TYPES.update({
+            "array": (list, tuple),
+            "regex": six.string_types,
+            "url": six.string_types,
+        })
+    else:
+        type_checker = validator_class.TYPE_CHECKER.redefine_many(
             {"array": is_array, "regex": is_string_type, "url": is_string_type}
         )
-
-    return jsonschema.validators.extend(
-        validator_class,
-        {
-            "properties": properties_validator,
-            "deprecated": error_on_deprecated,
-            "type": validate_regex_type,
-            "required": _validate_required,
-            "additionalProperties": _validate_additional_properties,
-            "anyOf": _validate_any_of,
-        },
-        **kwargs
-    )
+        validator = jsonschema.validators.extend(
+            validator_class,
+            {
+                "properties": properties_validator,
+                "deprecated": error_on_deprecated,
+                "type": validate_regex_type,
+                "required": _validate_required,
+                "additionalProperties": _validate_additional_properties,
+                "anyOf": _validate_any_of,
+            },
+            type_checker=type_checker,
+        )
+    return validator


 class ConfigDeprecation(jsonschema.exceptions.ValidationError):
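Both branches above handle the same jsonschema 2.x/3.x split; master detects
the API via ``hasattr`` while the alma branch keys off the distro version. A
reduced sketch of the two interfaces (not the actual pungi code)::

    import jsonschema

    if hasattr(jsonschema.Draft4Validator, "TYPE_CHECKER"):
        # jsonschema >= 3: custom types are registered on a TypeChecker
        checker = jsonschema.Draft4Validator.TYPE_CHECKER.redefine(
            "array", lambda checker, instance: isinstance(instance, (list, tuple))
        )
        Validator = jsonschema.validators.extend(
            jsonschema.Draft4Validator, type_checker=checker
        )
    else:
        # jsonschema < 3: a types mapping is passed when instantiating the
        # validator instead, e.g. Validator(schema, {"array": (list, tuple)})
        Validator = jsonschema.Draft4Validator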
@@ -521,13 +529,6 @@ def make_schema():
                 "file": {"type": "string"},
                 "dir": {"type": "string"},
                 "command": {"type": "string"},
-                "options": {
-                    "type": "object",
-                    "properties": {
-                        "credential_helper": {"type": "string"},
-                    },
-                    "additionalProperties": False,
-                },
             },
             "additionalProperties": False,
         },
@@ -845,11 +846,8 @@ def make_schema():
             "pdc_insecure": {"deprecated": "Koji is queried instead"},
             "cts_url": {"type": "string"},
             "cts_keytab": {"type": "string"},
-            "cts_oidc_token_url": {"type": "url"},
-            "cts_oidc_client_id": {"type": "string"},
             "koji_profile": {"type": "string"},
             "koji_event": {"type": "number"},
-            "koji_cache": {"type": "string"},
             "pkgset_koji_tag": {"$ref": "#/definitions/strings"},
             "pkgset_koji_builds": {"$ref": "#/definitions/strings"},
             "pkgset_koji_scratch_tasks": {"$ref": "#/definitions/strings"},
@@ -867,10 +865,6 @@ def make_schema():
                 "type": "boolean",
                 "default": True,
             },
-            "pkgset_inherit_exclusive_arch_to_noarch": {
-                "type": "boolean",
-                "default": True,
-            },
             "pkgset_scratch_modules": {
                 "type": "object",
                 "patternProperties": {
@@ -1073,9 +1067,6 @@ def make_schema():
             "config_branch": {"type": "string"},
             "tag_ref": {"type": "boolean"},
             "ostree_ref": {"type": "string"},
-            "runroot_packages": {
-                "$ref": "#/definitions/list_of_strings",
-            },
         },
         "required": [
             "treefile",
pungi/compose.py
@@ -17,7 +17,6 @@
 __all__ = ("Compose",)


-import contextlib
 import errno
 import logging
 import os
@@ -39,7 +38,6 @@ from dogpile.cache import make_region
 from pungi.graph import SimpleAcyclicOrientedGraph
 from pungi.wrappers.variants import VariantsXmlParser
 from pungi.paths import Paths
-from pungi.wrappers.kojiwrapper import KojiDownloadProxy
 from pungi.wrappers.scm import get_file_from_scm
 from pungi.util import (
     makedirs,
@@ -59,101 +57,20 @@ except ImportError:
 SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]


-def is_status_fatal(status_code):
-    """Check if status code returned from CTS reports an error that is unlikely
-    to be fixed by retrying. Generally client errors (4XX) are fatal, with the
-    exception of 401 Unauthorized which could be caused by transient network
-    issue between compose host and KDC.
-    """
-    if status_code == 401:
-        return False
-    return status_code >= 400 and status_code < 500
-
-
 @retry(wait_on=RequestException)
-def retry_request(method, url, data=None, json_data=None, auth=None):
-    """
-    :param str method: Request method.
-    :param str url: Target URL.
-    :param dict data: form-urlencoded data to send in the body of the request.
-    :param dict json_data: json data to send in the body of the request.
-    """
+def retry_request(method, url, data=None, auth=None):
     request_method = getattr(requests, method)
-    rv = request_method(url, data=data, json=json_data, auth=auth)
-    if is_status_fatal(rv.status_code):
+    rv = request_method(url, json=data, auth=auth)
+    if rv.status_code >= 400 and rv.status_code < 500:
         try:
-            error = rv.json()
+            error = rv.json()["message"]
         except ValueError:
             error = rv.text
-        raise RuntimeError("%s responded with %d: %s" % (url, rv.status_code, error))
+        raise RuntimeError("CTS responded with %d: %s" % (rv.status_code, error))
     rv.raise_for_status()
     return rv
-
-
-class BearerAuth(requests.auth.AuthBase):
-    def __init__(self, token):
-        self.token = token
-
-    def __call__(self, r):
-        r.headers["authorization"] = "Bearer " + self.token
-        return r
-
-
-@contextlib.contextmanager
-def cts_auth(pungi_conf):
-    """
-    :param dict pungi_conf: dict obj of pungi.json config.
-    """
-    auth = None
-    token = None
-    cts_keytab = pungi_conf.get("cts_keytab")
-    cts_oidc_token_url = os.environ.get("CTS_OIDC_TOKEN_URL", "") or pungi_conf.get(
-        "cts_oidc_token_url"
-    )
-
-    try:
-        if cts_keytab:
-            # requests-kerberos cannot accept custom keytab, we need to use
-            # environment variable for this. But we need to change environment
-            # only temporarily just for this single requests.post.
-            # So at first backup the current environment and revert to it
-            # after the requests call.
-            from requests_kerberos import HTTPKerberosAuth
-
-            auth = HTTPKerberosAuth()
-            environ_copy = dict(os.environ)
-            if "$HOSTNAME" in cts_keytab:
-                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
-            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
-            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
-        elif cts_oidc_token_url:
-            cts_oidc_client_id = os.environ.get(
-                "CTS_OIDC_CLIENT_ID", ""
-            ) or pungi_conf.get("cts_oidc_client_id", "")
-            token = retry_request(
-                "post",
-                cts_oidc_token_url,
-                data={
-                    "grant_type": "client_credentials",
-                    "client_id": cts_oidc_client_id,
-                    "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
-                },
-            ).json()["access_token"]
-            auth = BearerAuth(token)
-            del token
-
-        yield auth
-    except Exception as e:
-        # Avoid leaking client secret in traceback
-        e.show_locals = False
-        raise e
-    finally:
-        if cts_keytab:
-            shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
-            os.environ.clear()
-            os.environ.update(environ_copy)


 def get_compose_info(
     conf,
     compose_type="production",
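``BearerAuth`` above is a standard ``requests`` auth plug-in; hypothetical
usage (the URL and token value are fake; a real token would come from the OIDC
token endpoint)::

    import requests

    resp = requests.get(
        "https://cts.example.com/api/1/composes/",
        auth=BearerAuth("not-a-real-token"),
    )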
@@ -183,19 +100,38 @@ def get_compose_info(
     ci.compose.type = compose_type
     ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
     ci.compose.respin = compose_respin or 0
-    ci.compose.id = ci.create_compose_id()

-    cts_url = conf.get("cts_url")
+    cts_url = conf.get("cts_url", None)
     if cts_url:
-        # Create compose in CTS and get the reserved compose ID.
-        url = os.path.join(cts_url, "api/1/composes/")
-        data = {
-            "compose_info": json.loads(ci.dumps()),
-            "parent_compose_ids": parent_compose_ids,
-            "respin_of": respin_of,
-        }
-        with cts_auth(conf) as authentication:
-            rv = retry_request("post", url, json_data=data, auth=authentication)
+        # Requests-kerberos cannot accept custom keytab, we need to use
+        # environment variable for this. But we need to change environment
+        # only temporarily just for this single requests.post.
+        # So at first backup the current environment and revert to it
+        # after the requests.post call.
+        cts_keytab = conf.get("cts_keytab", None)
+        authentication = get_authentication(conf)
+        if cts_keytab:
+            environ_copy = dict(os.environ)
+            if "$HOSTNAME" in cts_keytab:
+                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
+            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
+            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
+
+        try:
+            # Create compose in CTS and get the reserved compose ID.
+            ci.compose.id = ci.create_compose_id()
+            url = os.path.join(cts_url, "api/1/composes/")
+            data = {
+                "compose_info": json.loads(ci.dumps()),
+                "parent_compose_ids": parent_compose_ids,
+                "respin_of": respin_of,
+            }
+            rv = retry_request("post", url, data=data, auth=authentication)
+        finally:
+            if cts_keytab:
+                shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
+                os.environ.clear()
+                os.environ.update(environ_copy)

         # Update local ComposeInfo with received ComposeInfo.
         cts_ci = ComposeInfo()
@@ -203,9 +139,22 @@ def get_compose_info(
         ci.compose.respin = cts_ci.compose.respin
         ci.compose.id = cts_ci.compose.id

+    else:
+        ci.compose.id = ci.create_compose_id()
+
     return ci


+def get_authentication(conf):
+    authentication = None
+    cts_keytab = conf.get("cts_keytab", None)
+    if cts_keytab:
+        from requests_kerberos import HTTPKerberosAuth
+
+        authentication = HTTPKerberosAuth()
+    return authentication
+
+
 def write_compose_info(compose_dir, ci):
     """
     Write ComposeInfo `ci` to `compose_dir` subdirectories.
@@ -219,6 +168,7 @@ def write_compose_info(compose_dir, ci):


 def update_compose_url(compose_id, compose_dir, conf):
+    authentication = get_authentication(conf)
     cts_url = conf.get("cts_url", None)
     if cts_url:
         url = os.path.join(cts_url, "api/1/composes", compose_id)
@@ -231,8 +181,7 @@ def update_compose_url(compose_id, compose_dir, conf):
             "action": "set_url",
             "compose_url": compose_url,
         }
-        with cts_auth(conf) as authentication:
-            return retry_request("patch", url, json_data=data, auth=authentication)
+        return retry_request("patch", url, data=data, auth=authentication)


 def get_compose_dir(
@@ -243,19 +192,11 @@ def get_compose_dir(
     compose_respin=None,
     compose_label=None,
     already_exists_callbacks=None,
-    parent_compose_ids=None,
-    respin_of=None,
 ):
     already_exists_callbacks = already_exists_callbacks or []

     ci = get_compose_info(
-        conf,
-        compose_type,
-        compose_date,
-        compose_respin,
-        compose_label,
-        parent_compose_ids,
-        respin_of,
+        conf, compose_type, compose_date, compose_respin, compose_label
     )

     cts_url = conf.get("cts_url", None)
@@ -410,8 +351,6 @@ class Compose(kobo.log.LoggingBase):
         else:
             self.cache_region = make_region().configure("dogpile.cache.null")

-        self.koji_downloader = KojiDownloadProxy.from_config(self.conf, self._logger)
-
     get_compose_info = staticmethod(get_compose_info)
     write_compose_info = staticmethod(write_compose_info)
     get_compose_dir = staticmethod(get_compose_dir)
@@ -707,7 +646,7 @@ class Compose(kobo.log.LoggingBase):
             separators=(",", ": "),
         )

-    def traceback(self, detail=None, show_locals=True):
+    def traceback(self, detail=None):
         """Store an extended traceback. This method should only be called when
         handling an exception.
@@ -719,7 +658,7 @@ class Compose(kobo.log.LoggingBase):
         tb_path = self.paths.log.log_file("global", basename)
         self.log_error("Extended traceback in: %s", tb_path)
         with open(tb_path, "wb") as f:
-            f.write(kobo.tback.Traceback(show_locals=show_locals).get_traceback())
+            f.write(kobo.tback.Traceback().get_traceback())

     def load_old_compose_config(self):
         """
pungi/createiso.py

@@ -5,14 +5,11 @@ from __future__ import print_function
 import os
-import six
 from collections import namedtuple
 from kobo.shortcuts import run
 from six.moves import shlex_quote

 from .wrappers import iso
 from .wrappers.jigdo import JigdoWrapper
-
-from .phases.buildinstall import BOOT_CONFIGS, BOOT_IMAGES


 CreateIsoOpts = namedtuple(
     "CreateIsoOpts",
@@ -121,46 +118,7 @@ def make_jigdo(f, opts):
     emit(f, cmd)


-def _get_perms(fs_path):
-    """Compute proper permissions for a file.
-
-    This mimics what -rational-rock option of genisoimage does. All read bits
-    are set, so that files and directories are globally readable. If any
-    execute bit is set for a file, set them all. No writes are allowed and
-    special bits are erased too.
-    """
-    statinfo = os.stat(fs_path)
-    perms = 0o444
-    if statinfo.st_mode & 0o111:
-        perms |= 0o111
-    return perms
-
-
 def write_xorriso_commands(opts):
-    # Create manifest for the boot.iso listing all contents
-    boot_iso_manifest = "%s.manifest" % os.path.join(
-        opts.script_dir, os.path.basename(opts.boot_iso)
-    )
-    run(
-        iso.get_manifest_cmd(
-            opts.boot_iso, opts.use_xorrisofs, output_file=boot_iso_manifest
-        )
-    )
-    # Find which files may have been updated by pungi. This only includes a few
-    # files from tweaking buildinstall and .discinfo metadata. There's no good
-    # way to detect whether the boot config files actually changed, so we may
-    # be updating files in the ISO with the same data.
-    UPDATEABLE_FILES = set(BOOT_IMAGES + BOOT_CONFIGS + [".discinfo"])
-    updated_files = set()
-    excluded_files = set()
-    with open(boot_iso_manifest) as f:
-        for line in f:
-            path = line.lstrip("/").rstrip("\n")
-            if path in UPDATEABLE_FILES:
-                updated_files.add(path)
-            else:
-                excluded_files.add(path)
-
     script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
     with open(script, "w") as f:
         emit(f, "-indev %s" % opts.boot_iso)
@@ -169,25 +127,16 @@ def write_xorriso_commands(opts):
         emit(f, "-volid %s" % opts.volid)
         # isoinfo -J uses the Joliet tree, and it's used by virt-install
         emit(f, "-joliet on")
-        # Support long filenames in the Joliet trees. Repodata is particularly
-        # likely to run into this limit.
-        emit(f, "-compliance joliet_long_names")

         with open(opts.graft_points) as gp:
             for line in gp:
                 iso_path, fs_path = line.strip().split("=", 1)
-                if iso_path in excluded_files:
-                    continue
-                cmd = "-update" if iso_path in updated_files else "-map"
-                emit(f, "%s %s %s" % (cmd, fs_path, iso_path))
-                emit(f, "-chmod 0%o %s" % (_get_perms(fs_path), iso_path))
+                emit(f, "-map %s %s" % (fs_path, iso_path))

         if opts.arch == "ppc64le":
             # This is needed for the image to be bootable.
             emit(f, "-as mkisofs -U --")

-        emit(f, "-chown_r 0 /")
-        emit(f, "-chgrp_r 0 /")
         emit(f, "-end")
         return script
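On the alma side the generated command file therefore reduces to a plain
sequence like the following (paths and volume id invented; commands emitted by
parts of the function not shown in this diff are omitted)::

    -indev /compose/x86_64/iso/boot.iso
    -volid Server-1-0-x86_64
    -joliet on
    -map /work/x86_64/Server/GPL GPL
    -map /work/x86_64/Server/repodata repodata
    -end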
pungi/gather.py

@@ -1118,6 +1118,7 @@ class Pungi(PungiBase):
         self.logger.info("Finished gathering package objects.")

     def gather(self):
+
         # get package objects according to the input list
         self.getPackageObjects()
         if self.is_sources:
pungi/gather_dnf.py

@@ -15,20 +15,17 @@
 from enum import Enum
-from functools import cmp_to_key
-from itertools import count, groupby
+from itertools import count
 import logging
 import os
 import re

 from kobo.rpmlib import parse_nvra
-import rpm

 import pungi.common
 import pungi.dnf_wrapper
 import pungi.multilib_dnf
 import pungi.util
 from pungi import arch_utils
 from pungi.linker import Linker
 from pungi.profiler import Profiler
 from pungi.util import DEBUG_PATTERNS
@@ -248,36 +245,12 @@ class Gather(GatherBase):
         # from lookaside. This can be achieved by removing any package that is
         # also in lookaside from the list.
         lookaside_pkgs = set()
+        for pkg in package_list:
+            if pkg.repoid in self.opts.lookaside_repos:
+                lookaside_pkgs.add("{0.name}-{0.evr}".format(pkg))

-        if self.opts.lookaside_repos:
-            # We will call `latest()` to get the highest version packages only.
-            # However, that is per name and architecture. If a package switches
-            # from arched to noarch or the other way, it is possible that the
-            # package_list contains different versions in main repos and in
-            # lookaside repos.
-            # We need to manually filter the latest version.
-            def vercmp(x, y):
-                return rpm.labelCompare(x[1], y[1])
-
-            # Annotate the packages with their version.
-            versioned_packages = [
-                (pkg, (str(pkg.epoch) or "0", pkg.version, pkg.release))
-                for pkg in package_list
-            ]
-            # Sort the packages newest first.
-            sorted_packages = sorted(
-                versioned_packages, key=cmp_to_key(vercmp), reverse=True
-            )
-            # Group packages by version, take the first group and discard the
-            # version info from the tuple.
-            package_list = list(
-                x[0] for x in next(groupby(sorted_packages, key=lambda x: x[1]))[1]
-            )
-
-        # Now we can decide what is used from lookaside.
-        for pkg in package_list:
-            if pkg.repoid in self.opts.lookaside_repos:
-                lookaside_pkgs.add("{0.name}-{0.evr}".format(pkg))

         if self.opts.greedy_method == "all":
             return list(package_list)

         all_pkgs = []
         for pkg in package_list:
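The removed block leans on ``rpm.labelCompare``, which compares
``(epoch, version, release)`` triples and returns 1, 0 or -1; for example::

    import rpm

    # 1:1.0-1 beats 0:2.0-1 because epoch wins
    print(rpm.labelCompare(("1", "1.0", "1"), ("0", "2.0", "1")))  # 1
    print(rpm.labelCompare(("0", "1.0", "1"), ("0", "1.0", "2")))  # -1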
|
@@ -290,22 +263,17 @@ class Gather(GatherBase):

         if not debuginfo:
             native_pkgs = set(
-                self.q_native_binary_packages.filter(pkg=all_pkgs).latest().apply()
+                self.q_native_binary_packages.filter(pkg=all_pkgs).apply()
             )
             multilib_pkgs = set(
-                self.q_multilib_binary_packages.filter(pkg=all_pkgs).latest().apply()
+                self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply()
             )
         else:
-            native_pkgs = set(
-                self.q_native_debug_packages.filter(pkg=all_pkgs).latest().apply()
-            )
+            native_pkgs = set(self.q_native_debug_packages.filter(pkg=all_pkgs).apply())
             multilib_pkgs = set(
-                self.q_multilib_debug_packages.filter(pkg=all_pkgs).latest().apply()
+                self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply()
             )

         if self.opts.greedy_method == "all":
             return list(native_pkgs | multilib_pkgs)

         result = set()

         # try seen native packages first
@@ -424,7 +392,9 @@ class Gather(GatherBase):
         """Given a name of a queue (stored as attribute in `self`), exclude
         all given packages and keep only the latest per package name and arch.
         """
-        setattr(self, queue, getattr(self, queue).filter(pkg__neq=exclude).apply())
+        setattr(
+            self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply()
+        )

     @Profiler("Gather._apply_excludes()")
     def _apply_excludes(self, excludes):
@@ -530,21 +500,12 @@ class Gather(GatherBase):
                     name__glob=pattern[:-2]
                 ).apply()
             else:
-                kwargs = {"name__glob": pattern}
-                if "." in pattern:
-                    # The pattern could be name.arch. Check if the
-                    # arch is valid, and if yes, make a more
-                    # specific query.
-                    name, arch = pattern.split(".", 1)
-                    if arch in arch_utils.arches:
-                        kwargs["name__glob"] = name
-                        kwargs["arch__eq"] = arch
-                pkgs = self.q_binary_packages.filter(**kwargs).apply()
+                pkgs = self.q_binary_packages.filter(
+                    name__glob=pattern
+                ).apply()

             if not pkgs:
-                self.logger.error(
-                    "Could not find a match for %s in any configured repo", pattern
-                )
+                self.logger.error("No package matches pattern %s" % pattern)

             # The pattern could have been a glob. In that case we want to
             # group the packages by name and get best match in those
@ -655,6 +616,7 @@ class Gather(GatherBase):
|
|||
return added
|
||||
|
||||
for pkg in self.result_debug_packages.copy():
|
||||
|
||||
if pkg not in self.finished_add_debug_package_deps:
|
||||
deps = self._get_package_deps(pkg, debuginfo=True)
|
||||
for i, req in deps:
|
||||
|
@ -822,6 +784,7 @@ class Gather(GatherBase):
|
|||
continue
|
||||
|
||||
debug_pkgs = []
|
||||
pkg_in_lookaside = pkg.repoid in self.opts.lookaside_repos
|
||||
for i in candidates:
|
||||
if pkg.arch != i.arch:
|
||||
continue
|
||||
|
@ -829,7 +792,7 @@ class Gather(GatherBase):
|
|||
# If it's not debugsource package or does not match name of
|
||||
# the package, we don't want it in.
|
||||
continue
|
||||
if self.is_from_lookaside(i):
|
||||
if i.repoid in self.opts.lookaside_repos or pkg_in_lookaside:
|
||||
self._set_flag(i, PkgFlag.lookaside)
|
||||
if i not in self.result_debug_packages:
|
||||
added.add(i)
|
||||
|
|
|
@ -306,6 +306,11 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
if variant.type in ("addon",) or variant.is_empty:
    return

compose.log_debug(
    "on arch '%s' looking at variant '%s' of type '%s'"
    % (arch, variant, variant.type)
)

if not timestamp:
    timestamp = int(time.time())
else:
@ -297,7 +297,7 @@ class BuildinstallPhase(PhaseBase):
        "Unsupported buildinstall method: %s" % self.buildinstall_method
    )

for variant, cmd in commands:
for (variant, cmd) in commands:
    self.pool.add(BuildinstallThread(self.pool))
    self.pool.queue_put(
        (self.compose, arch, variant, cmd, self.pkgset_phase)

@ -364,17 +364,9 @@ BOOT_CONFIGS = [
    "EFI/BOOT/BOOTX64.conf",
    "EFI/BOOT/grub.cfg",
]
BOOT_IMAGES = [
    "images/efiboot.img",
]


def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
    """
    Put escaped volume ID and possibly kickstart file into the boot
    configuration files.
    :returns: list of paths to modified config files
    """
    volid_escaped = volid.replace(" ", r"\x20").replace("\\", "\\\\")
    volid_escaped_2 = volid_escaped.replace("\\", "\\\\")
    found_configs = []

@ -382,6 +374,7 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
    config_path = os.path.join(path, config)
    if not os.path.exists(config_path):
        continue
    found_configs.append(config)

    with open(config_path, "r") as f:
        data = original_data = f.read()

@ -401,13 +394,8 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
    with open(config_path, "w") as f:
        f.write(data)

    if data != original_data:
        found_configs.append(config)
        if logger:
            # Generally lorax should create file with correct volume id
            # already. If we don't have a kickstart, this function should
            # be a no-op.
            logger.info("Boot config %s changed" % config_path)
    if logger and data != original_data:
        logger.info("Boot config %s changed" % config_path)

return found_configs
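A quick illustration of the escaping chain in tweak_configs above, with a made-up volume ID. The raw string r"\x20" inserts a literal backslash sequence, and each subsequent .replace("\\", "\\\\") doubles every backslash for configs whose parser strips one level of escaping:

volid = "AlmaLinux 8 x86_64"  # made-up volume ID
step1 = volid.replace(" ", r"\x20")          # AlmaLinux\x208\x20x86_64
volid_escaped = step1.replace("\\", "\\\\")  # AlmaLinux\\x208\\x20x86_64
volid_escaped_2 = volid_escaped.replace("\\", "\\\\")
print(volid_escaped_2)                       # AlmaLinux\\\\x208\\\\x20x86_64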
@ -446,32 +434,31 @@ def tweak_buildinstall(
if kickstart_file and found_configs:
    shutil.copy2(kickstart_file, os.path.join(dst, "ks.cfg"))

images = [os.path.join(tmp_dir, img) for img in BOOT_IMAGES]
if found_configs:
    for image in images:
        if not os.path.isfile(image):
            continue
images = [
    os.path.join(tmp_dir, "images", "efiboot.img"),
]
for image in images:
    if not os.path.isfile(image):
        continue

        with iso.mount(
            image,
            logger=compose._logger,
            use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
        ) as mount_tmp_dir:
            for config in found_configs:
                # Put each modified config file into the image (overwriting the
                # original).
                config_path = os.path.join(tmp_dir, config)
                config_in_image = os.path.join(mount_tmp_dir, config)
    with iso.mount(
        image,
        logger=compose._logger,
        use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
    ) as mount_tmp_dir:
        for config in BOOT_CONFIGS:
            config_path = os.path.join(tmp_dir, config)
            config_in_image = os.path.join(mount_tmp_dir, config)

            if os.path.isfile(config_in_image):
                cmd = [
                    "cp",
                    "-v",
                    "--remove-destination",
                    config_path,
                    config_in_image,
                ]
                run(cmd)
            if os.path.isfile(config_in_image):
                cmd = [
                    "cp",
                    "-v",
                    "--remove-destination",
                    config_path,
                    config_in_image,
                ]
                run(cmd)

# HACK: make buildinstall files world readable
run("chmod -R a+rX %s" % shlex_quote(tmp_dir))
@ -369,7 +369,7 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
if self.compose.notifier:
    self.compose.notifier.send("createiso-targets", deliverables=deliverables)

for cmd, variant, arch in commands:
for (cmd, variant, arch) in commands:
    self.pool.add(CreateIsoThread(self.pool))
    self.pool.queue_put((self.compose, cmd, variant, arch))
@ -76,7 +76,7 @@ class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
for arch in sorted(arches):
    commands.append((config, variant, arch))

for config, variant, arch in commands:
for (config, variant, arch) in commands:
    self.pool.add(ExtraIsosThread(self.pool, self.bi))
    self.pool.queue_put((self.compose, config, variant, arch))
@ -91,7 +91,7 @@ class GatherPhase(PhaseBase):

# check whether variants from configuration value
# 'variant_as_lookaside' are correct
for requiring, required in variant_as_lookaside:
for (requiring, required) in variant_as_lookaside:
    if requiring in all_variants and required not in all_variants:
        errors.append(
            "variant_as_lookaside: variant %r doesn't exist but is "

@ -100,7 +100,7 @@ class GatherPhase(PhaseBase):

# check whether variants from configuration value
# 'variant_as_lookaside' have same architectures
for requiring, required in variant_as_lookaside:
for (requiring, required) in variant_as_lookaside:
    if (
        requiring in all_variants
        and required in all_variants

@ -236,7 +236,7 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets, methods):
if not hasattr(compose, "_gather_reused_variant_arch"):
    setattr(compose, "_gather_reused_variant_arch", [])
variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
for requiring, required in variant_as_lookaside:
for (requiring, required) in variant_as_lookaside:
    if (
        requiring == variant.uid
        and (required, arch) not in compose._gather_reused_variant_arch

@ -469,7 +469,9 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
    )

else:

    for source_name in ("module", "comps", "json"):

        packages, groups, filter_packages = get_variant_packages(
            compose, arch, variant, source_name, package_sets
        )

@ -574,6 +576,7 @@ def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs
move_to_parent_pkgs = _mk_pkg_map()
removed_pkgs = _mk_pkg_map()
for pkg_type, pkgs in pkg_map.items():

    new_pkgs = []
    for pkg in pkgs:
        pkg_path = pkg["path"]

@ -645,10 +648,9 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
        compose.paths.work.topdir(arch="global"), "download"
    )
    + "/",
    "koji": lambda: compose.conf.get(
        "koji_cache",
        pungi.wrappers.kojiwrapper.KojiWrapper(compose).koji_module.config.topdir,
    ).rstrip("/")
    "koji": lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
        compose
    ).koji_module.config.topdir.rstrip("/")
    + "/",
    "kojimock": lambda: pungi.wrappers.kojiwrapper.KojiMockWrapper(
        compose,
@ -47,15 +47,9 @@ class FakePackage(object):

@property
def files(self):
    paths = []
    # createrepo_c.Package.files is a tuple, but its length differs across
    # versions. The constants define index at which the related value is
    # located.
    for entry in self.pkg.files:
        paths.append(
            os.path.join(entry[cr.FILE_ENTRY_PATH], entry[cr.FILE_ENTRY_NAME])
        )
    return paths
    return [
        os.path.join(dirname, basename) for (_, dirname, basename) in self.pkg.files
    ]

@property
def provides(self):
@ -25,7 +25,6 @@ from productmd.rpms import Rpms
# results will be pulled into the compose.
EXTENSIONS = {
    "docker": ["tar.gz", "tar.xz"],
    "iso": ["iso"],
    "liveimg-squashfs": ["liveimg.squashfs"],
    "qcow": ["qcow"],
    "qcow2": ["qcow2"],

@ -40,7 +39,6 @@ EXTENSIONS = {
    "vdi": ["vdi"],
    "vmdk": ["vmdk"],
    "vpc": ["vhd"],
    "vhd-compressed": ["vhd.gz", "vhd.xz"],
    "vsphere-ova": ["vsphere.ova"],
}

@ -346,9 +344,7 @@ class CreateImageBuildThread(WorkerThread):
# let's not change filename of koji outputs
image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

src_file = compose.koji_downloader.get_file(
    os.path.realpath(image_info["path"])
)
src_file = os.path.realpath(image_info["path"])
linker.link(src_file, image_dest, link_type=cmd["link_type"])

# Update image manifest
@ -117,7 +117,7 @@ class LiveImagesPhase(

        commands.append((cmd, variant, arch))

for cmd, variant, arch in commands:
for (cmd, variant, arch) in commands:
    self.pool.add(CreateLiveImageThread(self.pool))
    self.pool.queue_put((self.compose, cmd, variant, arch))

@ -232,7 +232,7 @@ class CreateLiveImageThread(WorkerThread):
        "Got %d images from task %d, expected 1."
        % (len(image_path), output["task_id"])
    )
image_path = compose.koji_downloader.get_file(image_path[0])
image_path = image_path[0]
filename = cmd.get("filename") or os.path.basename(image_path)
destination = os.path.join(cmd["dest_dir"], filename)
shutil.copy2(image_path, destination)
@ -182,9 +182,7 @@ class LiveMediaThread(WorkerThread):
# let's not change filename of koji outputs
image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

src_file = compose.koji_downloader.get_file(
    os.path.realpath(image_info["path"])
)
src_file = os.path.realpath(image_info["path"])
linker.link(src_file, image_dest, link_type=link_type)

# Update image manifest
@ -212,27 +212,16 @@ class RunOSBuildThread(WorkerThread):
# image_dir is absolute path to which the image should be copied.
# We also need the same path as relative to compose directory for
# including in the metadata.
if archive["type_name"] == "iso":
    # If the produced image is actually an ISO, it should go to
    # iso/ subdirectory.
    image_dir = compose.paths.compose.iso_dir(arch, variant)
    rel_image_dir = compose.paths.compose.iso_dir(
        arch, variant, relative=True
    )
else:
    image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
    rel_image_dir = compose.paths.compose.image_dir(
        variant, relative=True
    ) % {"arch": arch}
image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
rel_image_dir = compose.paths.compose.image_dir(variant, relative=True) % {
    "arch": arch
}
util.makedirs(image_dir)

image_dest = os.path.join(image_dir, archive["filename"])

src_file = compose.koji_downloader.get_file(
    os.path.join(
        koji.koji_module.pathinfo.imagebuild(build_info),
        archive["filename"],
    ),
src_file = os.path.join(
    koji.koji_module.pathinfo.imagebuild(build_info), archive["filename"]
)

linker.link(src_file, image_dest, link_type=compose.conf["link_type"])

@ -249,7 +238,7 @@

# Update image manifest
img = Image(compose.im)
img.type = archive["type_name"] if archive["type_name"] != "iso" else "dvd"
img.type = archive["type_name"]
img.format = suffix
img.path = os.path.join(rel_image_dir, archive["filename"])
img.mtime = util.get_mtime(image_dest)
@ -168,9 +168,7 @@ class OSTreeThread(WorkerThread):
        ("unified-core", config.get("unified_core", False)),
    ]
)
default_packages = ["pungi", "ostree", "rpm-ostree"]
additional_packages = config.get("runroot_packages", [])
packages = default_packages + additional_packages
packages = ["pungi", "ostree", "rpm-ostree"]
log_file = os.path.join(self.logdir, "runroot.log")
mounts = [compose.topdir, config["ostree_repo"]]
runroot = Runroot(compose, phase="ostree")
@ -38,17 +38,12 @@ from pungi.phases.createrepo import add_modular_metadata

def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
    result = {}

    exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
    for arch in compose.get_arches():
        compose.log_info("Populating package set for arch: %s", arch)
        is_multilib = is_arch_multilib(compose.conf, arch)
        arches = get_valid_arches(arch, is_multilib, add_src=True)
        pkgset = global_pkgset.subset(
            arch,
            arches,
            exclusive_noarch=compose.conf["pkgset_exclusive_arch_considers_noarch"],
            inherit_to_noarch=compose.conf["pkgset_inherit_exclusive_arch_to_noarch"],
        )
        pkgset = global_pkgset.subset(arch, arches, exclusive_noarch=exclusive_noarch)
        pkgset.save_file_list(
            compose.paths.work.package_list(arch=arch, pkgset=global_pkgset),
            remove_path_prefix=path_prefix,
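A rough sketch of what the master-only inherit_to_noarch knob (config option pkgset_inherit_exclusive_arch_to_noarch) changes when building a per-arch subset. This is not the real PackageSetBase API; the semantics follow the merge() hunk further down, where ExclusiveArch filtering is applied to noarch packages, and the records here are hypothetical (name, arch, exclusivearch) tuples:

def subset_sketch(packages, valid_arches, inherit_to_noarch=True):
    picked = []
    for name, arch, exclusivearch in packages:
        if arch not in valid_arches:
            continue
        if inherit_to_noarch and arch == "noarch" and exclusivearch:
            # ExclusiveArch is applied to noarch packages too: keep the
            # package only if some valid arch is listed.
            if not set(exclusivearch) & set(valid_arches):
                continue
        picked.append(name)
    return picked

pkgs = [("a", "x86_64", []), ("b", "noarch", ["s390x"]), ("c", "noarch", [])]
print(subset_sketch(pkgs, {"x86_64", "noarch"}))                           # ['a', 'c']
print(subset_sketch(pkgs, {"x86_64", "noarch"}, inherit_to_noarch=False))  # ['a', 'b', 'c']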
@ -26,12 +26,10 @@ import time
import pgpy
import rpm
from six.moves import cPickle as pickle
from functools import partial

import kobo.log
import kobo.pkgset
import kobo.rpmlib
from kobo.shortcuts import compute_file_checksums

from kobo.threads import WorkerThread, ThreadPool

@ -213,31 +211,16 @@ class PackageSetBase(kobo.log.LoggingBase):

    return self.rpms_by_arch

def subset(
    self, primary_arch, arch_list, exclusive_noarch=True, inherit_to_noarch=True
):
def subset(self, primary_arch, arch_list, exclusive_noarch=True):
    """Create a subset of this package set that only includes
    packages compatible with"""
    pkgset = PackageSetBase(
        self.name, self.sigkey_ordering, logger=self._logger, arches=arch_list
    )
    pkgset.merge(
        self,
        primary_arch,
        arch_list,
        exclusive_noarch=exclusive_noarch,
        inherit_to_noarch=inherit_to_noarch,
    )
    pkgset.merge(self, primary_arch, arch_list, exclusive_noarch=exclusive_noarch)
    return pkgset

def merge(
    self,
    other,
    primary_arch,
    arch_list,
    exclusive_noarch=True,
    inherit_to_noarch=True,
):
def merge(self, other, primary_arch, arch_list, exclusive_noarch=True):
    """
    Merge ``other`` package set into this instance.
    """

@ -276,7 +259,7 @@ class PackageSetBase(kobo.log.LoggingBase):
if i.file_path in self.file_cache:
    # TODO: test if it really works
    continue
if inherit_to_noarch and exclusivearch_list and arch == "noarch":
if exclusivearch_list and arch == "noarch":
    if is_excluded(i, exclusivearch_list, logger=self._logger):
        continue

@ -343,11 +326,6 @@ class FilelistPackageSet(PackageSetBase):
    return result


# This is a marker to indicate package set with only extra builds/tasks and no
# tasks.
MISSING_KOJI_TAG = object()


class KojiPackageSet(PackageSetBase):
    def __init__(
        self,

@ -364,7 +342,6 @@ class KojiPackageSet(PackageSetBase):
extra_tasks=None,
signed_packages_retries=0,
signed_packages_wait=30,
downloader=None,
):
"""
Creates new KojiPackageSet.

@ -402,7 +379,7 @@ class KojiPackageSet(PackageSetBase):
:param int signed_packages_wait: How long to wait between search attempts.
"""
super(KojiPackageSet, self).__init__(
    name if name != MISSING_KOJI_TAG else "no-tag",
    name,
    sigkey_ordering=sigkey_ordering,
    arches=arches,
    logger=logger,

@ -419,8 +396,6 @@ class KojiPackageSet(PackageSetBase):
self.signed_packages_retries = signed_packages_retries
self.signed_packages_wait = signed_packages_wait

self.downloader = downloader

def __getstate__(self):
    result = self.__dict__.copy()
    del result["koji_wrapper"]

@ -542,28 +517,11 @@ class KojiPackageSet(PackageSetBase):
# Check if this RPM is coming from scratch task. In this case, we already
# know the path.
if "path_from_task" in rpm_info:
    return self.downloader.get_file(rpm_info["path_from_task"])
    return rpm_info["path_from_task"]

pathinfo = self.koji_wrapper.koji_module.pathinfo
paths = []

if "getRPMChecksums" in self.koji_proxy.system.listMethods():

    def checksum_validator(keyname, pkg_path):
        checksums = self.koji_proxy.getRPMChecksums(
            rpm_info["id"], checksum_types=("sha256",)
        )
        if "sha256" in checksums.get(keyname, {}):
            computed = compute_file_checksums(pkg_path, ("sha256",))
            if computed["sha256"] != checksums[keyname]["sha256"]:
                raise RuntimeError("Checksum mismatch for %s" % pkg_path)

else:

    def checksum_validator(keyname, pkg_path):
        # Koji doesn't support checksums yet
        pass

attempts_left = self.signed_packages_retries + 1
while attempts_left > 0:
    for sigkey in self.sigkey_ordering:
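The checksum_validator above only runs when the Koji hub exposes getRPMChecksums, and its whole contract is "raise on mismatch". A standalone sketch of that contract, using hashlib in place of kobo's compute_file_checksums and with the expected digest supplied by the caller:

import hashlib

def compute_sha256(path):
    # Stream the file in 1 MiB chunks so large RPMs don't load into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

def validate(path, expected_sha256):
    # Same shape as the validator above: silent on success, raise on mismatch.
    computed = compute_sha256(path)
    if computed != expected_sha256:
        raise RuntimeError("Checksum mismatch for %s" % path)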
@ -576,11 +534,8 @@ class KojiPackageSet(PackageSetBase):
            )
            if rpm_path not in paths:
                paths.append(rpm_path)
            path = self.downloader.get_file(
                rpm_path, partial(checksum_validator, sigkey)
            )
            if path:
                return path
            if os.path.isfile(rpm_path):
                return rpm_path

    # No signed copy was found, wait a little and try again.
    attempts_left -= 1

@ -593,18 +548,16 @@ class KojiPackageSet(PackageSetBase):
    # use an unsigned copy (if allowed)
    rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
    paths.append(rpm_path)
    path = self.downloader.get_file(rpm_path, partial(checksum_validator, ""))
    if path:
        return path
    if os.path.isfile(rpm_path):
        return rpm_path

if self._allow_invalid_sigkeys and rpm_info["name"] not in self.packages:
    # use an unsigned copy (if allowed)
    rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
    paths.append(rpm_path)
    path = self.downloader.get_file(rpm_path)
    if path:
    if os.path.isfile(rpm_path):
        self._invalid_sigkey_rpms.append(rpm_info)
        return path
        return rpm_path

self._invalid_sigkey_rpms.append(rpm_info)
self.log_error(

@ -634,9 +587,7 @@ class KojiPackageSet(PackageSetBase):
    inherit,
)
self.log_info("[BEGIN] %s" % msg)
rpms, builds = [], []
if tag != MISSING_KOJI_TAG:
    rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
extra_rpms, extra_builds = self.get_extra_rpms()
rpms += extra_rpms
builds += extra_builds

@ -741,15 +692,6 @@ class KojiPackageSet(PackageSetBase):
:param include_packages: an iterable of tuples (package name, arch) that should
    be included.
"""
if len(self.sigkey_ordering) > 1 and (
    None in self.sigkey_ordering or "" in self.sigkey_ordering
):
    self.log_warning(
        "Stop writing reuse file as unsigned packages are allowed "
        "in the compose."
    )
    return

reuse_file = compose.paths.work.pkgset_reuse_file(self.name)
self.log_info("Writing pkgset reuse file: %s" % reuse_file)
try:

@ -766,12 +708,6 @@ class KojiPackageSet(PackageSetBase):
        "srpms_by_name": self.srpms_by_name,
        "extra_builds": self.extra_builds,
        "include_packages": include_packages,
        "inherit_to_noarch": compose.conf[
            "pkgset_inherit_exclusive_arch_to_noarch"
        ],
        "exclusive_noarch": compose.conf[
            "pkgset_exclusive_arch_considers_noarch"
        ],
    },
    f,
    protocol=pickle.HIGHEST_PROTOCOL,

@ -866,8 +802,6 @@ class KojiPackageSet(PackageSetBase):
    self.log_debug("Failed to load reuse file: %s" % str(e))
    return False

inherit_to_noarch = compose.conf["pkgset_inherit_exclusive_arch_to_noarch"]
exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
if (
    reuse_data["allow_invalid_sigkeys"] == self._allow_invalid_sigkeys
    and reuse_data["packages"] == self.packages

@ -875,10 +809,6 @@ class KojiPackageSet(PackageSetBase):
    and reuse_data["extra_builds"] == self.extra_builds
    and reuse_data["sigkeys"] == self.sigkey_ordering
    and reuse_data["include_packages"] == include_packages
    # If the value is not present in reuse data, the compose was
    # generated with older version of Pungi. Best to not reuse.
    and reuse_data.get("inherit_to_noarch") == inherit_to_noarch
    and reuse_data.get("exclusive_noarch") == exclusive_noarch
):
    self.log_info("Copying repo data for reuse: %s" % old_repo_dir)
    copy_all(old_repo_dir, repo_dir)

@ -894,6 +824,69 @@ class KojiPackageSet(PackageSetBase):


class KojiMockPackageSet(KojiPackageSet):
    def __init__(
        self,
        name,
        koji_wrapper,
        sigkey_ordering,
        arches=None,
        logger=None,
        packages=None,
        allow_invalid_sigkeys=False,
        populate_only_packages=False,
        cache_region=None,
        extra_builds=None,
        extra_tasks=None,
        signed_packages_retries=0,
        signed_packages_wait=30,
    ):
        """
        Creates new KojiPackageSet.

        :param list sigkey_ordering: Ordered list of sigkey strings. When
            getting package from Koji, KojiPackageSet tries to get the package
            signed by sigkey from this list. If None or "" appears in this
            list, unsigned package is used.
        :param list arches: List of arches to get the packages for.
        :param logging.Logger logger: Logger instance to use for logging.
        :param list packages: List of package names to be used when
            `allow_invalid_sigkeys` or `populate_only_packages` is set.
        :param bool allow_invalid_sigkeys: When True, packages *not* listed in
            the `packages` list are added to KojiPackageSet even if they have
            invalid sigkey. This is useful in case Koji tag contains some
            unsigned packages, but we know they won't appear in a compose.
            When False, all packages in Koji tag must have valid sigkey as
            defined in `sigkey_ordering`.
        :param bool populate_only_packages: When True, only packages in
            `packages` list are added to KojiPackageSet. This can save time
            when generating compose from predefined list of packages from big
            Koji tag.
            When False, all packages from Koji tag are added to KojiPackageSet.
        :param dogpile.cache.CacheRegion cache_region: If set, the CacheRegion
            will be used to cache the list of RPMs per Koji tag, so next calls
            of the KojiPackageSet.populate(...) method won't try fetching it
            again.
        :param list extra_builds: Extra builds NVRs to get from Koji and include
            in the package set.
        :param list extra_tasks: Extra RPMs defined as Koji task IDs to get from Koji
            and include in the package set. Useful when building testing compose
            with RPM scratch builds.
        """
        super(KojiMockPackageSet, self).__init__(
            name,
            koji_wrapper=koji_wrapper,
            sigkey_ordering=sigkey_ordering,
            arches=arches,
            logger=logger,
            packages=packages,
            allow_invalid_sigkeys=allow_invalid_sigkeys,
            populate_only_packages=populate_only_packages,
            cache_region=cache_region,
            extra_builds=extra_builds,
            extra_tasks=extra_tasks,
            signed_packages_retries=signed_packages_retries,
            signed_packages_wait=signed_packages_wait,
        )

    def _is_rpm_signed(self, rpm_path) -> bool:
        ts = rpm.TransactionSet()
@ -193,13 +193,17 @@ class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
def __call__(self):
    compose = self.compose
    self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(compose)
    package_sets = get_pkgset_from_koji(self.compose, self.koji_wrapper)
    return (package_sets, self.compose.koji_downloader.path_prefix)
    # path prefix must contain trailing '/'
    path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
    package_sets = get_pkgset_from_koji(
        self.compose, self.koji_wrapper, path_prefix
    )
    return (package_sets, path_prefix)


def get_pkgset_from_koji(compose, koji_wrapper):
def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
    event_info = get_koji_event_info(compose, koji_wrapper)
    return populate_global_pkgset(compose, koji_wrapper, event_info)
    return populate_global_pkgset(compose, koji_wrapper, path_prefix, event_info)


def _add_module_to_variant(

@ -228,7 +232,7 @@ def _add_module_to_variant(
    continue
typedir = koji_wrapper.koji_module.pathinfo.typedir(build, archive["btype"])
filename = archive["filename"]
file_path = compose.koji_downloader.get_file(os.path.join(typedir, filename))
file_path = os.path.join(typedir, filename)
try:
    # If there are two dots, the arch is in the middle. MBS uploads
    # files with actual architecture in the filename, but Pungi deals

@ -266,14 +270,9 @@ def _add_module_to_variant(
        "Module %s does not have metadata for arch %s and is not filtered "
        "out via filter_modules option." % (nsvc, arch)
    )
try:
    mod_stream = read_single_module_stream_from_file(
        mmds[filename], compose, arch, build
    )
except Exception as exc:
    # libmodulemd raises various GLib exceptions with not very helpful
    # messages. Let's replace it with something more useful.
    raise RuntimeError("Failed to read %s: %s", mmds[filename], str(exc))
mod_stream = read_single_module_stream_from_file(
    mmds[filename], compose, arch, build
)
if mod_stream:
    added = True
    variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream

@ -396,13 +395,7 @@ def _is_filtered_out(compose, variant, arch, module_name, module_stream):


def _get_modules_from_koji(
    compose,
    koji_wrapper,
    event,
    variant,
    variant_tags,
    tag_to_mmd,
    exclude_module_ns,
    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd, exclude_module_ns
):
    """
    Loads modules for given `variant` from koji `session`, adds them to

@ -677,7 +670,7 @@ def _get_modules_from_koji_tags(
)


def populate_global_pkgset(compose, koji_wrapper, event):
def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
    all_arches = get_all_arches(compose)

    # List of compose tags from which we create this compose

@ -771,12 +764,7 @@ def populate_global_pkgset(compose, koji_wrapper, event):

if extra_modules:
    _add_extra_modules_to_variant(
        compose,
        koji_wrapper,
        variant,
        extra_modules,
        variant_tags,
        tag_to_mmd,
        compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
    )

variant_scratch_modules = get_variant_data(

@ -803,23 +791,17 @@ def populate_global_pkgset(compose, koji_wrapper, event):

pkgsets = []

extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))

if not pkgset_koji_tags and (extra_builds or extra_tasks):
    # We have extra packages to pull in, but no tag to merge them with.
    compose_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)
    pkgset_koji_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)

# Get package set for each compose tag and merge it to global package
# list. Also prepare per-variant pkgset, because we do not have list
# of binary RPMs in module definition - there is just list of SRPMs.
for compose_tag in compose_tags:
    compose.log_info("Loading package set for tag %s", compose_tag)
    kwargs = {}
    if compose_tag in pkgset_koji_tags:
        kwargs["extra_builds"] = extra_builds
        kwargs["extra_tasks"] = extra_tasks
        extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
        extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
    else:
        extra_builds = []
        extra_tasks = []

    pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
        compose_tag,

@ -831,10 +813,10 @@ def populate_global_pkgset(compose, koji_wrapper, event):
        allow_invalid_sigkeys=allow_invalid_sigkeys,
        populate_only_packages=populate_only_packages_to_gather,
        cache_region=compose.cache_region,
        extra_builds=extra_builds,
        extra_tasks=extra_tasks,
        signed_packages_retries=compose.conf["signed_packages_retries"],
        signed_packages_wait=compose.conf["signed_packages_wait"],
        downloader=compose.koji_downloader,
        **kwargs
    )

    # Check if we have cache for this tag from previous compose. If so, use

@ -898,6 +880,7 @@ def populate_global_pkgset(compose, koji_wrapper, event):
    )
    for variant in compose.all_variants.values():
        if compose_tag in variant_tags[variant]:

            # If it's a modular tag, store the package set for the module.
            for nsvc, koji_tag in variant.module_uid_to_koji_tag.items():
                if compose_tag == koji_tag:

@ -920,7 +903,7 @@ def populate_global_pkgset(compose, koji_wrapper, event):
        MaterializedPackageSet.create,
        compose,
        pkgset,
        compose.koji_downloader.path_prefix,
        path_prefix,
        mmd=tag_to_mmd.get(pkgset.name),
    )
)
@ -1,63 +0,0 @@
import argparse
import os
import re
import time

from pungi.util import format_size


LOCK_RE = re.compile(r".*\.lock(\|[A-Za-z0-9]+)*$")


def should_be_cleaned_up(path, st, threshold):
    if st.st_nlink == 1 and st.st_mtime < threshold:
        # No other instances, older than limit
        return True

    if LOCK_RE.match(path) and st.st_mtime < threshold:
        # Suspiciously old lock
        return True

    return False


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("CACHE_DIR")
    parser.add_argument("-n", "--dry-run", action="store_true")
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument(
        "--max-age",
        help="how old files should be considered for deletion",
        default=7,
        type=int,
    )

    args = parser.parse_args()

    topdir = os.path.abspath(args.CACHE_DIR)
    max_age = args.max_age * 24 * 3600

    cleaned_up = 0

    threshold = time.time() - max_age
    for dirpath, dirnames, filenames in os.walk(topdir):
        for f in filenames:
            filepath = os.path.join(dirpath, f)
            st = os.stat(filepath)
            if should_be_cleaned_up(filepath, st, threshold):
                if args.verbose:
                    print("RM %s" % filepath)
                cleaned_up += st.st_size
                if not args.dry_run:
                    os.remove(filepath)
        if not dirnames and not filenames:
            if args.verbose:
                print("RMDIR %s" % dirpath)
            if not args.dry_run:
                os.rmdir(dirpath)

    if args.dry_run:
        print("Would reclaim %s bytes." % format_size(cleaned_up))
    else:
        print("Reclaimed %s bytes." % format_size(cleaned_up))
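The st_nlink == 1 test in this master-only cleanup script is what keeps live composes safe: a cached RPM that a compose has hardlinked still has a link count above one, so only orphaned files age out. A small standalone demonstration of the heuristic using temporary files:

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    src = os.path.join(d, "pkg.rpm")
    open(src, "w").close()
    print(os.stat(src).st_nlink)  # 1 -> eligible for cleanup once old enough
    os.link(src, os.path.join(d, "compose-copy.rpm"))
    print(os.stat(src).st_nlink)  # 2 -> still referenced by a compose, keep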
@ -171,11 +171,32 @@ def main():
group.add_argument(
    "--offline", action="store_true", help="Do not resolve git references."
)
parser.add_argument(
    "--multi",
    metavar="DIR",
    help=(
        "Treat source as config for pungi-orchestrate and store dump into "
        "given directory."
    ),
)

args = parser.parse_args()

defines = config_utils.extract_defines(args.define)

if args.multi:
    if len(args.sources) > 1:
        parser.error("Only one multi config can be specified.")

    return dump_multi_config(
        args.sources[0],
        dest=args.multi,
        defines=defines,
        just_dump=args.just_dump,
        event=args.freeze_event,
        offline=args.offline,
    )

return process_file(
    args.sources,
    defines=defines,
@ -118,15 +118,15 @@ class PackagesGenerator:
self.pkgs = dict()
self.excluded_packages = excluded_packages
self.included_packages = included_packages
self.tmp_files = []  # type: list[Path]
self.tmp_files = []
for arch, arch_list in self.addon_repos.items():
    self.repo_arches[arch].extend(arch_list)
    self.repo_arches[arch].append(arch)

def __del__(self):
    for tmp_file in self.tmp_files:
        if tmp_file.exists():
            tmp_file.unlink()
        if os.path.exists(tmp_file):
            os.remove(tmp_file)

@staticmethod
def _get_full_repo_path(repo_info: RepoInfo):

@ -149,7 +149,8 @@ class PackagesGenerator:
    print(f'Warning message: "{message}"; warning type: "{warning_type}"')
    return True

def get_remote_file_content(self, file_url: AnyStr) -> AnyStr:
@staticmethod
def get_remote_file_content(file_url: AnyStr) -> AnyStr:
    """
    Get content from a remote file and write it to a temp file
    :param file_url: url of a remote file

@ -162,7 +163,6 @@ class PackagesGenerator:
    file_request.raise_for_status()
    with tempfile.NamedTemporaryFile(delete=False) as file_stream:
        file_stream.write(file_request.content)
        self.tmp_files.append(Path(file_stream.name))
        return file_stream.name

@staticmethod

@ -253,6 +253,7 @@ class PackagesGenerator:
if repo_info.is_remote:
    repomd_record_file_path = self.get_remote_file_content(
        repomd_record_file_path)
    self.tmp_files.append(repomd_record_file_path)
repomd_records_dict[repomd_record.type] = repomd_record_file_path

def _parse_module_repomd_record(

@ -276,10 +277,10 @@ class PackagesGenerator:
if repo_info.is_remote:
    repomd_record_file_path = self.get_remote_file_content(
        repomd_record_file_path)
    self.tmp_files.append(repomd_record_file_path)
return list(self._parse_modules_file(
    repomd_record_file_path,
))
return []

@staticmethod
def compare_pkgs_version(package_1: Package, package_2: Package) -> int:
@ -14,9 +14,6 @@ def send(cmd, data):
topic = "compose.%s" % cmd.replace("-", ".").lower()
try:
    msg = fedora_messaging.api.Message(topic="pungi.{}".format(topic), body=data)
    if cmd == "ostree":
        # https://pagure.io/fedora-infrastructure/issue/10899
        msg.priority = 3
    fedora_messaging.api.publish(msg)
except fedora_messaging.exceptions.PublishReturned as e:
    print("Fedora Messaging broker rejected message %s: %s" % (msg.id, e))
@ -59,12 +59,7 @@ def read_modules_yaml_from_specific_repo(
    repo_path + '/',
    'repodata/repomd.xml',
)
packages_generator = PackagesGenerator(
    variants=[],
    excluded_packages=[],
    included_packages=[],
)
repomd_file_path = packages_generator.get_remote_file_content(
repomd_file_path = PackagesGenerator.get_remote_file_content(
    file_url=repomd_url
)
else:

@ -82,12 +77,7 @@ def read_modules_yaml_from_specific_repo(
    repo_path + '/',
    record.location_href,
)
packages_generator = PackagesGenerator(
    variants=[],
    excluded_packages=[],
    included_packages=[],
)
modules_yaml_path = packages_generator.get_remote_file_content(
modules_yaml_path = PackagesGenerator.get_remote_file_content(
    file_url=modules_yaml_url
)
else:
@ -1,4 +1,3 @@
import re
from argparse import ArgumentParser

import os

@ -12,7 +11,7 @@ from productmd.common import parse_nvra

@dataclass
class Package:
    nvra: dict
    nvra: str
    path: Path


@ -24,30 +23,12 @@ def search_rpms(top_dir: Path) -> List[Package]:
    list: list of paths
"""
return [Package(
    nvra=parse_nvra(Path(path).stem),
    nvra=Path(path).stem,
    path=Path(path),
) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]


def is_excluded_package(
    package: Package,
    excluded_packages: List[str],
) -> bool:
    package_key = f'{package.nvra["name"]}.{package.nvra["arch"]}'
    return any(
        re.search(
            f'^{excluded_pkg}$',
            package_key,
        ) or excluded_pkg in (package.nvra['name'], package_key)
        for excluded_pkg in excluded_packages
    )


def copy_rpms(
    packages: List[Package],
    target_top_dir: Path,
    excluded_packages: List[str],
):
def copy_rpms(packages: List[Package], target_top_dir: Path):
    """
    Search synced repos for rpms and prepare
    koji-like structure for pungi
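Master's is_excluded_package treats each exclusion entry both as a regex anchored over "name.arch" and as an exact match against the bare name or the "name.arch" key. A standalone sketch of those rules, with made-up package names:

import re

def is_excluded(name, arch, excluded_patterns):
    # Try each pattern as an anchored regex over "name.arch", then as an
    # exact match of the bare name or the full key.
    key = f"{name}.{arch}"
    return any(
        re.search(f"^{pat}$", key) or pat in (name, key)
        for pat in excluded_patterns
    )

print(is_excluded("kernel", "x86_64", ["kernel"]))      # True (bare name)
print(is_excluded("kernel", "x86_64", [r"kernel.*"]))   # True (regex over key)
print(is_excluded("kernel-doc", "noarch", ["kernel"]))  # False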
@ -59,9 +40,8 @@
    Nothing:
"""
for package in packages:
    if is_excluded_package(package, excluded_packages):
        continue
    target_arch_dir = target_top_dir.joinpath(package.nvra['arch'])
    info = parse_nvra(package.nvra)
    target_arch_dir = target_top_dir.joinpath(info['arch'])
    target_file = target_arch_dir.joinpath(package.path.name)
    os.makedirs(target_arch_dir, exist_ok=True)

@ -77,19 +57,11 @@ def cli_main():
parser = ArgumentParser()
parser.add_argument('-p', '--path', required=True, type=Path)
parser.add_argument('-t', '--target', required=True, type=Path)
parser.add_argument(
    '-e',
    '--excluded-packages',
    required=False,
    nargs='+',
    type=str,
    default=[],
)

namespace = parser.parse_args()

rpms = search_rpms(namespace.path)
copy_rpms(rpms, namespace.target, namespace.excluded_packages)
copy_rpms(rpms, namespace.target)


if __name__ == '__main__':
@ -319,6 +319,7 @@ def get_arguments(config):


def main():

    config = pungi.config.Config()
    opts = get_arguments(config)
@ -23,7 +23,6 @@ from pungi.phases import PHASES_NAMES
from pungi import get_full_version, util
from pungi.errors import UnsignedPackagesError
from pungi.wrappers import kojiwrapper
from pungi.util import rmtree


# force C locales

@ -234,16 +233,9 @@ def main():
latest_link_status = opts.latest_link_status or None
latest_link_components = opts.latest_link_components

import kobo.conf
import kobo.log
import productmd.composeinfo

if opts.label:
    try:
        productmd.composeinfo.verify_label(opts.label)
    except ValueError as ex:
        abort(str(ex))

from pungi.compose import Compose

logger = logging.getLogger("pungi")

@ -310,9 +302,7 @@ def main():
    opts.target_dir,
    conf,
    compose_type=compose_type,
    compose_label=label,
    parent_compose_ids=opts.parent_compose_id,
    respin_of=opts.respin_of,
    compose_label=label
)
else:
    compose_dir = opts.compose_dir

@ -392,14 +382,6 @@ def run_compose(
compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
compose.log_info("COMPOSE_ID=%s" % compose.compose_id)

installed_pkgs_log = compose.paths.log.log_file("global", "installed-pkgs")
compose.log_info("Logging installed packages to %s" % installed_pkgs_log)
try:
    with open(installed_pkgs_log, "w") as f:
        subprocess.Popen(["rpm", "-qa"], stdout=f)
except Exception as e:
    compose.log_warning("Failed to log installed packages: %s" % str(e))

compose.read_variants()

# dump the config file

@ -691,7 +673,7 @@ def cli_main():
except (Exception, KeyboardInterrupt) as ex:
    if COMPOSE:
        COMPOSE.log_error("Compose run failed: %s" % ex)
        COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
        COMPOSE.traceback()
        COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
        COMPOSE.write_status("DOOMED")
    else:

@ -700,8 +682,3 @@ def cli_main():
        sys.stdout.flush()
        sys.stderr.flush()
        sys.exit(1)
finally:
    # Remove repositories cloned during ExtraFiles phase
    process_id = os.getpid()
    directoy_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
    rmtree(directoy_to_remove)
@ -279,7 +279,7 @@ class GitUrlResolveError(RuntimeError):
pass


def resolve_git_ref(repourl, ref, credential_helper=None):
def resolve_git_ref(repourl, ref):
    """Resolve a reference in a Git repo to a commit.

    Raises RuntimeError if there was an error. Most likely cause is failure to

@ -289,7 +289,7 @@ def resolve_git_ref(repourl, ref, credential_helper=None):
    # This looks like a commit ID already.
    return ref
try:
    _, output = git_ls_remote(repourl, ref, credential_helper)
    _, output = git_ls_remote(repourl, ref)
except RuntimeError as e:
    raise GitUrlResolveError(
        "ref does not exist in remote repo %s with the error %s %s"

@ -316,7 +316,7 @@ def resolve_git_ref(repourl, ref, credential_helper=None):
return lines[0].split()[0]


def resolve_git_url(url, credential_helper=None):
def resolve_git_url(url):
    """Given a url to a Git repo specifying HEAD or origin/<branch> as a ref,
    replace that specifier with actual SHA1 of the commit.

@ -335,7 +335,7 @@ def resolve_git_url(url, credential_helper=None):
scheme = r.scheme.replace("git+", "")

baseurl = urllib.parse.urlunsplit((scheme, r.netloc, r.path, "", ""))
fragment = resolve_git_ref(baseurl, ref, credential_helper)
fragment = resolve_git_ref(baseurl, ref)

result = urllib.parse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))
if "?#" in url:

@ -354,18 +354,13 @@ class GitUrlResolver(object):
self.offline = offline
self.cache = {}

def __call__(self, url, branch=None, options=None):
    credential_helper = options.get("credential_helper") if options else None
def __call__(self, url, branch=None):
    if self.offline:
        return branch or url
    key = (url, branch)
    if key not in self.cache:
        try:
            res = (
                resolve_git_ref(url, branch, credential_helper)
                if branch
                else resolve_git_url(url, credential_helper)
            )
            res = resolve_git_ref(url, branch) if branch else resolve_git_url(url)
            self.cache[key] = res
        except GitUrlResolveError as exc:
            self.cache[key] = exc

@ -461,9 +456,6 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
if not variant_uid and "%(variant)s" in i:
    continue
try:
    # fmt: off
    # Black wants to add a comma after kwargs, but that's not valid in
    # Python 2.7
    args = get_format_substs(
        compose,
        variant=variant_uid,

@ -475,7 +467,6 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
        base_product_version=base_product_version,
        **kwargs
    )
    # fmt: on
    volid = (i % args).format(**args)
except KeyError as err:
    raise RuntimeError(

@ -1000,12 +991,8 @@ def retry(timeout=120, interval=30, wait_on=Exception):


@retry(wait_on=RuntimeError)
def git_ls_remote(baseurl, ref, credential_helper=None):
    cmd = ["git"]
    if credential_helper:
        cmd.extend(["-c", "credential.useHttpPath=true"])
        cmd.extend(["-c", "credential.helper=%s" % credential_helper])
    return run(cmd + ["ls-remote", baseurl, ref], universal_newlines=True)
def git_ls_remote(baseurl, ref):
    return run(["git", "ls-remote", baseurl, ref], universal_newlines=True)


def get_tz_offset():
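With a credential helper configured, master's git_ls_remote expands to an ordinary git invocation. A sketch of the command list it builds, for a hypothetical helper path and repository URL:

credential_helper = "/usr/local/bin/pungi-cred-helper"  # made-up helper path
cmd = ["git"]
if credential_helper:
    cmd.extend(["-c", "credential.useHttpPath=true"])
    cmd.extend(["-c", "credential.helper=%s" % credential_helper])
cmd += ["ls-remote", "https://example.com/repo.git", "HEAD"]
print(" ".join(cmd))
# git -c credential.useHttpPath=true -c credential.helper=/usr/local/bin/pungi-cred-helper ls-remote https://example.com/repo.git HEAD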
@ -1150,16 +1137,3 @@ def read_json_file(file_path):
    """A helper function to read a JSON file."""
    with open(file_path) as f:
        return json.load(f)


UNITS = ["", "Ki", "Mi", "Gi", "Ti"]


def format_size(sz):
    sz = float(sz)
    unit = 0
    while sz > 1024:
        sz /= 1024
        unit += 1

    return "%.3g %sB" % (sz, UNITS[unit])
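A quick check of the master-only format_size helper, restated here so the snippet runs on its own (sample values are arbitrary):

UNITS = ["", "Ki", "Mi", "Gi", "Ti"]

def format_size(sz):
    # Repeatedly divide by 1024 and step up one unit each time.
    sz = float(sz)
    unit = 0
    while sz > 1024:
        sz /= 1024
        unit += 1
    return "%.3g %sB" % (sz, UNITS[unit])

print(format_size(512))          # 512 B
print(format_size(2048))         # 2 KiB
print(format_size(5 * 1024**3))  # 5 GiB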
@ -183,16 +183,15 @@ class CompsFilter(object):
"""
all_groups = self.tree.xpath("/comps/group/id/text()") + lookaside_groups
for environment in self.tree.xpath("/comps/environment"):
    for parent_tag in ("grouplist", "optionlist"):
        for group in environment.xpath("%s/groupid" % parent_tag):
            if group.text not in all_groups:
                group.getparent().remove(group)
    for group in environment.xpath("grouplist/groupid"):
        if group.text not in all_groups:
            group.getparent().remove(group)

        for group in environment.xpath("%s/groupid[@arch]" % parent_tag):
            value = group.attrib.get("arch")
            values = [v for v in re.split(r"[, ]+", value) if v]
            if arch not in values:
                group.getparent().remove(group)
    for group in environment.xpath("grouplist/groupid[@arch]"):
        value = group.attrib.get("arch")
        values = [v for v in re.split(r"[, ]+", value) if v]
        if arch not in values:
            group.getparent().remove(group)

def remove_empty_environments(self):
    """
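The environment filtering above is a plain lxml xpath-and-remove pattern. A minimal sketch on a made-up comps fragment, dropping groupid entries whose arch attribute excludes x86_64:

import re
from lxml import etree

xml = b"""<comps><environment><grouplist>
  <groupid arch="x86_64,aarch64">core</groupid>
  <groupid arch="s390x">legacy</groupid>
</grouplist></environment></comps>"""
tree = etree.fromstring(xml)
for group in tree.xpath("//grouplist/groupid[@arch]"):
    values = [v for v in re.split(r"[, ]+", group.attrib["arch"]) if v]
    if "x86_64" not in values:
        # getparent() is lxml-specific; it lets us detach the node in place.
        group.getparent().remove(group)
print(etree.tostring(tree).decode())  # only the "core" groupid remains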
@ -260,23 +260,20 @@ def get_isohybrid_cmd(iso_path, arch):
return cmd


def get_manifest_cmd(iso_name, xorriso=False, output_file=None):
    if not output_file:
        output_file = "%s.manifest" % iso_name

def get_manifest_cmd(iso_name, xorriso=False):
    if xorriso:
        return """xorriso -dev %s --find |
tail -n+2 |
tr -d "'" |
cut -c2- |
sort >> %s""" % (
sort >> %s.manifest""" % (
            shlex_quote(iso_name),
            shlex_quote(iso_name),
            shlex_quote(output_file),
        )
    else:
        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s" % (
        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
            shlex_quote(iso_name),
            shlex_quote(iso_name),
            shlex_quote(output_file),
        )
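For reference, the non-xorriso branch of master's get_manifest_cmd renders to a plain shell pipeline. With a sample ISO name:

from six.moves import shlex_quote

iso_name = "Fedora-Server-dvd-x86_64-39.iso"  # sample name
output_file = "%s.manifest" % iso_name        # master's default output file
cmd = "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s" % (
    shlex_quote(iso_name),
    shlex_quote(output_file),
)
print(cmd)
# isoinfo -R -f -i Fedora-Server-dvd-x86_64-39.iso | grep -v '/TRANS.TBL$' | sort >> Fedora-Server-dvd-x86_64-39.iso.manifest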
@ -14,23 +14,17 @@
|
|||
# along with this program; if not, see <https://gnu.org/licenses/>.
|
||||
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import shutil
|
||||
import time
|
||||
import threading
|
||||
|
||||
import requests
|
||||
import contextlib
|
||||
|
||||
import koji
|
||||
from kobo.shortcuts import run, force_list
|
||||
import six
|
||||
from six.moves import configparser, shlex_quote
|
||||
import six.moves.xmlrpc_client as xmlrpclib
|
||||
from flufl.lock import Lock
|
||||
from datetime import timedelta
|
||||
|
||||
from .kojimock import KojiMock
|
||||
from .. import util
|
||||
|
@ -792,10 +786,11 @@ class KojiWrapper(object):
|
|||
if list_of_args is None and list_of_kwargs is None:
|
||||
raise ValueError("One of list_of_args or list_of_kwargs must be set.")
|
||||
|
||||
if list_of_args is not None and not isinstance(list_of_args, list):
|
||||
raise ValueError("list_of_args must be list or None.")
|
||||
if list_of_kwargs is not None and not isinstance(list_of_kwargs, list):
|
||||
raise ValueError("list_of_kwargs must be list or None.")
|
||||
if type(list_of_args) not in [type(None), list] or type(list_of_kwargs) not in [
|
||||
type(None),
|
||||
list,
|
||||
]:
|
||||
raise ValueError("list_of_args and list_of_kwargs must be list or None.")
|
||||
|
||||
if list_of_kwargs is None:
|
||||
list_of_kwargs = [{}] * len(list_of_args)
|
||||
|
@ -809,9 +804,9 @@ class KojiWrapper(object):
|
|||
|
||||
koji_session.multicall = True
|
||||
for args, kwargs in zip(list_of_args, list_of_kwargs):
|
||||
if not isinstance(args, list):
|
||||
if type(args) != list:
|
||||
args = [args]
|
||||
if not isinstance(kwargs, dict):
|
||||
if type(kwargs) != dict:
|
||||
raise ValueError("Every item in list_of_kwargs must be a dict")
|
||||
koji_session_fnc(*args, **kwargs)
|
||||
|
||||
|
@ -819,7 +814,7 @@ class KojiWrapper(object):
|
|||
|
||||
if not responses:
|
||||
return None
|
||||
if not isinstance(responses, list):
|
||||
if type(responses) != list:
|
||||
raise ValueError(
|
||||
"Fault element was returned for multicall of method %r: %r"
|
||||
% (koji_session_fnc, responses)
|
||||
|
@ -835,7 +830,7 @@ class KojiWrapper(object):
|
|||
# a one-item array containing the result value,
|
||||
# or a struct of the form found inside the standard <fault> element.
|
||||
for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
|
||||
if isinstance(response, list):
|
||||
if type(response) == list:
|
||||
if not response:
|
||||
raise ValueError(
|
||||
"Empty list returned for multicall of method %r with args %r, %r" # noqa: E501
|
||||
|
@ -940,176 +935,3 @@ def get_buildroot_rpms(compose, task_id):
|
|||
continue
|
||||
result.append(i)
|
||||
return sorted(result)
|
||||
|
||||
|
||||
class KojiDownloadProxy:
|
||||
def __init__(self, topdir, topurl, cache_dir, logger):
|
||||
if not topdir:
|
||||
# This will only happen if there is either no koji_profile
|
||||
# configured, or the profile doesn't have a topdir. In the first
|
||||
# case there will be no koji interaction, and the second indicates
|
||||
# broken koji configuration.
|
||||
# We can pretend to have local access in both cases to avoid any
|
||||
# external requests.
|
||||
self.has_local_access = True
|
||||
return
|
||||
|
||||
self.cache_dir = cache_dir
|
||||
self.logger = logger
|
||||
|
||||
self.topdir = topdir
|
||||
self.topurl = topurl
|
||||
|
||||
# If cache directory is configured, we want to use it (even if we
|
||||
# actually have local access to the storage).
|
||||
self.has_local_access = not bool(cache_dir)
|
||||
# This is used for temporary downloaded files. The suffix is unique
|
||||
# per-process. To prevent threads in the same process from colliding, a
|
||||
# thread id is added later.
|
||||
self.unique_suffix = "%s.%s" % (socket.gethostname(), os.getpid())
|
||||
self.session = None
|
||||
if not self.has_local_access:
|
||||
self.session = requests.Session()
|
||||
|
||||
@property
|
||||
def path_prefix(self):
|
||||
dir = self.topdir if self.has_local_access else self.cache_dir
|
||||
return dir.rstrip("/") + "/"
|
||||
|
||||
@classmethod
|
||||
def from_config(klass, conf, logger):
|
||||
topdir = None
|
||||
topurl = None
|
||||
cache_dir = None
|
||||
if "koji_profile" in conf:
|
||||
koji_module = koji.get_profile_module(conf["koji_profile"])
|
||||
topdir = koji_module.config.topdir
|
||||
topurl = koji_module.config.topurl
|
||||
|
||||
cache_dir = conf.get("koji_cache")
|
||||
if cache_dir:
|
||||
cache_dir = cache_dir.rstrip("/") + "/"
|
||||
return klass(topdir, topurl, cache_dir, logger)
|
||||
|
||||
@util.retry(wait_on=requests.exceptions.RequestException)
|
||||
def _download(self, url, dest):
|
||||
"""Download file into given location
|
||||
|
||||
:param str url: URL of the file to download
|
||||
:param str dest: file path to store the result in
|
||||
:returns: path to the downloaded file (same as dest) or None if the URL
|
||||
"""
        with self.session.get(url, stream=True) as r:
            if r.status_code == 404:
                self.logger.warning("GET %s NOT FOUND", url)
                return None
            if r.status_code != 200:
                self.logger.error("GET %s %s", url, r.status_code)
                r.raise_for_status()
                # The exception from here will be retried by the decorator.

            file_size = int(r.headers.get("Content-Length", 0))
            self.logger.info("GET %s OK %s", url, util.format_size(file_size))
            with open(dest, "wb") as f:
                shutil.copyfileobj(r.raw, f)
            return dest

    def _delete(self, path):
        """Try to delete file at given path and ignore errors."""
        try:
            os.remove(path)
        except Exception:
            self.logger.warning("Failed to delete %s", path)

    def _atomic_download(self, url, dest, validator):
        """Atomically download a file

        :param str url: URL of the file to download
        :param str dest: file path to store the result in
        :returns: path to the downloaded file (same as dest) or None if the URL
            return 404.
        """
        temp_file = "%s.%s.%s" % (dest, self.unique_suffix, threading.get_ident())

        # First download to the temporary location.
        try:
            if self._download(url, temp_file) is None:
                # The file was not found.
                return None
        except Exception:
            # Download failed, let's make sure to clean up potentially partial
            # temporary file.
            self._delete(temp_file)
            raise

        # Check if the temporary file is correct (assuming we were provided a
        # validator function).
        try:
            if validator:
                validator(temp_file)
        except Exception:
            # Validation failed. Let's delete the problematic file and re-raise
            # the exception.
            self._delete(temp_file)
            raise

        # Atomically move the temporary file into final location
        os.rename(temp_file, dest)
        return dest

    def _download_file(self, path, validator):
        """Ensure file on Koji volume in ``path`` is present in the local
        cache.

        :returns: path to the local file or None if file is not found
        """
        url = path.replace(self.topdir, self.topurl)
        destination_file = path.replace(self.topdir, self.cache_dir)
        util.makedirs(os.path.dirname(destination_file))

        lock = Lock(destination_file + ".lock")
        # Hold the lock for this file for 5 minutes. If another compose needs
        # the same file but it's not downloaded yet, the process will wait.
        #
        # If the download finishes in time, the downloaded file will be used
        # here.
        #
        # If the download takes longer, this process will steal the lock and
        # start its own download.
        #
        # That should not be a problem: the same file will be downloaded and
        # then replaced atomically on the filesystem. If the original process
        # managed to hardlink the first file already, that hardlink will be
        # broken, but that will only result in the same file stored twice.
        lock.lifetime = timedelta(minutes=5)

        with lock:
            # Check if the file already exists. If yes, return the path.
            if os.path.exists(destination_file):
                # Update mtime of the file. This covers the case of packages in the
                # tag that are not included in the compose. Updating mtime will
                # exempt them from cleanup for extra time.
                os.utime(destination_file)
                return destination_file

            return self._atomic_download(url, destination_file, validator)

    def get_file(self, path, validator=None):
        """
        If path refers to an existing file in Koji, return a valid local path
        to it. If no such file exists, return None.

        :param validator: A callable that will be called with the path to the
            downloaded file if and only if the file was actually downloaded.
            Any exception raised from there will abort the download and be
            propagated.
        """
        if self.has_local_access:
            # We have koji volume mounted locally. No transformation needed for
            # the path, just check it exists.
            if os.path.exists(path):
                return path
            return None
        else:
            # We need to download the file.
            return self._download_file(path, validator)
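
A minimal usage sketch of the download proxy above. The profile name, cache path, logger and RPM path are invented for illustration; only from_config and get_file come from the code itself:

import logging
import os

logger = logging.getLogger("compose")
proxy = KojiDownloadProxy.from_config(
    {"koji_profile": "koji", "koji_cache": "/var/cache/pungi/koji"}, logger
)

def validator(path):
    # Raising here makes _atomic_download delete the partial file and re-raise.
    if os.path.getsize(path) == 0:
        raise RuntimeError("downloaded file is empty")

local = proxy.get_file("/mnt/koji/packages/foo/1.0/1/noarch/foo-1.0-1.noarch.rpm", validator)
if local is None:
    print("file does not exist in Koji")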
@@ -20,7 +20,6 @@ import os
import shutil
import glob
import six
import threading
from six.moves import shlex_quote
from six.moves.urllib.request import urlretrieve
from fnmatch import fnmatch

@@ -30,15 +29,12 @@ from kobo.shortcuts import run, force_list
from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
from .kojiwrapper import KojiWrapper

lock = threading.Lock()


class ScmBase(kobo.log.LoggingBase):
    def __init__(self, logger=None, command=None, compose=None, options=None):
    def __init__(self, logger=None, command=None, compose=None):
        kobo.log.LoggingBase.__init__(self, logger=logger)
        self.command = command
        self.compose = compose
        self.options = options or {}

    @retry(interval=60, timeout=300, wait_on=RuntimeError)
    def retry_run(self, cmd, **kwargs):

@@ -160,31 +156,22 @@ class GitWrapper(ScmBase):
        if "://" not in repo:
            repo = "file://%s" % repo

        git_cmd = ["git"]
        if "credential_helper" in self.options:
            git_cmd.extend(["-c", "credential.useHttpPath=true"])
            git_cmd.extend(
                ["-c", "credential.helper=%s" % self.options["credential_helper"]]
            )

        run(["git", "init"], workdir=destdir)
        try:
            run(git_cmd + ["fetch", "--depth=1", repo, branch], workdir=destdir)
            run(["git", "fetch", "--depth=1", repo, branch], workdir=destdir)
            run(["git", "checkout", "FETCH_HEAD"], workdir=destdir)
        except RuntimeError as e:
            # Fetch failed, to do a full clone we add a remote to our empty
            # repo, get its content and check out the reference we want.
            self.log_debug(
                "Trying to do a full clone because shallow clone failed: %s %s"
                % (e, getattr(e, "output", ""))
                % (e, e.output)
            )
            try:
                # Re-run git init in case of previous failure breaking .git dir
                run(["git", "init"], workdir=destdir)
                run(["git", "remote", "add", "origin", repo], workdir=destdir)
                self.retry_run(
                    git_cmd + ["remote", "update", "origin"], workdir=destdir
                )
                self.retry_run(["git", "remote", "update", "origin"], workdir=destdir)
                run(["git", "checkout", branch], workdir=destdir)
            except RuntimeError:
                if self.compose:
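
Both sides of the hunk above share the same fallback shape: try a cheap shallow fetch of a single ref, and only on failure fall back to a full remote update. A standalone sketch of that pattern with plain subprocess calls and placeholder arguments (the real code goes through kobo's run helper and the retry decorator):

import subprocess

def clone_ref(repo, branch, destdir):
    # Try a cheap shallow fetch of just the requested ref first.
    subprocess.check_call(["git", "init"], cwd=destdir)
    try:
        subprocess.check_call(["git", "fetch", "--depth=1", repo, branch], cwd=destdir)
        subprocess.check_call(["git", "checkout", "FETCH_HEAD"], cwd=destdir)
    except subprocess.CalledProcessError:
        # Shallow fetch failed (e.g. the server refuses to advertise the ref);
        # fall back to a full remote update and a normal checkout.
        subprocess.check_call(["git", "remote", "add", "origin", repo], cwd=destdir)
        subprocess.check_call(["git", "remote", "update", "origin"], cwd=destdir)
        subprocess.check_call(["git", "checkout", branch], cwd=destdir)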
@@ -198,38 +185,19 @@ class GitWrapper(ScmBase):
                    copy_all(destdir, debugdir)
                raise

    def get_temp_repo_path(self, scm_root, scm_branch):
        scm_repo = scm_root.split("/")[-1]
        process_id = os.getpid()
        tmp_dir = (
            "/tmp/pungi-temp-git-repos-"
            + str(process_id)
            + "/"
            + scm_repo
            + "-"
            + scm_branch
        )
        return tmp_dir

    def setup_repo(self, scm_root, scm_branch):
        tmp_dir = self.get_temp_repo_path(scm_root, scm_branch)
        if not os.path.isdir(tmp_dir):
            makedirs(tmp_dir)
            self._clone(scm_root, scm_branch, tmp_dir)
            self.run_process_command(tmp_dir)
        return tmp_dir
        self.run_process_command(destdir)

    def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
        scm_dir = scm_dir.lstrip("/")
        scm_branch = scm_branch or "master"

        self.log_debug(
            "Exporting directory %s from git %s (branch %s)..."
            % (scm_dir, scm_root, scm_branch)
        )
        with temp_dir() as tmp_dir:
            self.log_debug(
                "Exporting directory %s from git %s (branch %s)..."
                % (scm_dir, scm_root, scm_branch)
            )

        with lock:
            tmp_dir = self.setup_repo(scm_root, scm_branch)
            self._clone(scm_root, scm_branch, tmp_dir)

        copy_all(os.path.join(tmp_dir, scm_dir), target_dir)

@@ -237,15 +205,15 @@ class GitWrapper(ScmBase):
        scm_file = scm_file.lstrip("/")
        scm_branch = scm_branch or "master"

        target_path = os.path.join(target_dir, os.path.basename(scm_file))
        with temp_dir() as tmp_dir:
            target_path = os.path.join(target_dir, os.path.basename(scm_file))

        self.log_debug(
            "Exporting file %s from git %s (branch %s)..."
            % (scm_file, scm_root, scm_branch)
        )
            self.log_debug(
                "Exporting file %s from git %s (branch %s)..."
                % (scm_file, scm_root, scm_branch)
            )

        with lock:
            tmp_dir = self.setup_repo(scm_root, scm_branch)
            self._clone(scm_root, scm_branch, tmp_dir)

        makedirs(target_dir)
        shutil.copy2(os.path.join(tmp_dir, scm_file), target_path)
@@ -393,19 +361,15 @@ def get_file_from_scm(scm_dict, target_path, compose=None):
        scm_file = os.path.abspath(scm_dict)
        scm_branch = None
        command = None
        options = {}
    else:
        scm_type = scm_dict["scm"]
        scm_repo = scm_dict["repo"]
        scm_file = scm_dict["file"]
        scm_branch = scm_dict.get("branch", None)
        command = scm_dict.get("command")
        options = scm_dict.get("options", {})

    logger = compose._logger if compose else None
    scm = _get_wrapper(
        scm_type, logger=logger, command=command, compose=compose, options=options
    )
    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)

    files_copied = []
    for i in force_list(scm_file):

@@ -486,19 +450,15 @@ def get_dir_from_scm(scm_dict, target_path, compose=None):
        scm_dir = os.path.abspath(scm_dict)
        scm_branch = None
        command = None
        options = {}
    else:
        scm_type = scm_dict["scm"]
        scm_repo = scm_dict.get("repo", None)
        scm_dir = scm_dict["dir"]
        scm_branch = scm_dict.get("branch", None)
        command = scm_dict.get("command")
        options = scm_dict.get("options", {})

    logger = compose._logger if compose else None
    scm = _get_wrapper(
        scm_type, logger=logger, command=command, compose=compose, options=options
    )
    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)

    with temp_dir(prefix="scm_checkout_") as tmp_dir:
        scm.export_dir(scm_repo, scm_dir, scm_branch=scm_branch, target_dir=tmp_dir)
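
For reference, the scm_dict consumed by both helpers is a plain mapping. A hypothetical example; the keys match the lookups in the code above, the values are invented, and the "options" key exists only on the master side:

scm_dict = {
    "scm": "git",
    "repo": "https://example.com/kickstarts.git",
    "dir": "fedora/",   # used by get_dir_from_scm; get_file_from_scm reads "file"
    "branch": "f39",    # optional, GitWrapper defaults to "master"
    "command": None,    # optional command to run after checkout
    "options": {"credential_helper": "!ch"},
}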
@@ -276,6 +276,7 @@ class Variant(object):
        modules=None,
        modular_koji_tags=None,
    ):

        environments = environments or []
        buildinstallpackages = buildinstallpackages or []
@@ -0,0 +1,705 @@
# -*- coding: utf-8 -*-

from __future__ import print_function

import argparse
import atexit
import errno
import json
import logging
import os
import re
import shutil
import subprocess
import sys
import tempfile
import time
import threading
from collections import namedtuple

import kobo.conf
import kobo.log
import productmd
from kobo import shortcuts
from six.moves import configparser, shlex_quote

import pungi.util
from pungi.compose import get_compose_dir
from pungi.linker import linker_pool
from pungi.phases.pkgset.sources.source_koji import get_koji_event_raw
from pungi.util import find_old_compose, parse_koji_event, temp_dir
from pungi.wrappers.kojiwrapper import KojiWrapper


Config = namedtuple(
    "Config",
    [
        # Path to directory with the compose
        "target",
        "compose_type",
        "label",
        # Path to the selected old compose that will be reused
        "old_compose",
        # Path to directory with config file copies
        "config_dir",
        # Which koji event to use (if any)
        "event",
        # Additional arguments to pungi-koji executable
        "extra_args",
    ],
)

log = logging.getLogger(__name__)


class Status(object):
    # Ready to start
    READY = "READY"
    # Waiting for dependencies to finish.
    WAITING = "WAITING"
    # Part is currently running
    STARTED = "STARTED"
    # A dependency failed, this one will never start.
    BLOCKED = "BLOCKED"


class ComposePart(object):
    def __init__(self, name, config, just_phase=[], skip_phase=[], dependencies=[]):
        self.name = name
        self.config = config
        self.status = Status.WAITING if dependencies else Status.READY
        self.just_phase = just_phase
        self.skip_phase = skip_phase
        self.blocked_on = set(dependencies)
        self.depends_on = set(dependencies)
        self.path = None
        self.log_file = None
        self.failable = False

    def __str__(self):
        return self.name

    def __repr__(self):
        return (
            "ComposePart({0.name!r},"
            " {0.config!r},"
            " {0.status!r},"
            " just_phase={0.just_phase!r},"
            " skip_phase={0.skip_phase!r},"
            " dependencies={0.depends_on!r})"
        ).format(self)

    def refresh_status(self):
        """Refresh status of this part with the result of the compose. This
        should only be called once the compose finished.
        """
        try:
            with open(os.path.join(self.path, "STATUS")) as fh:
                self.status = fh.read().strip()
        except IOError as exc:
            log.error("Failed to update status of %s: %s", self.name, exc)
            log.error("Assuming %s is DOOMED", self.name)
            self.status = "DOOMED"

    def is_finished(self):
        return "FINISHED" in self.status

    def unblock_on(self, finished_part):
        """Update set of blockers for this part. If it's empty, mark us as ready."""
        self.blocked_on.discard(finished_part)
        if self.status == Status.WAITING and not self.blocked_on:
            log.debug("%s is ready to start", self)
            self.status = Status.READY

    def setup_start(self, global_config, parts):
        substitutions = dict(
            ("part-%s" % name, p.path) for name, p in parts.items() if p.is_finished()
        )
        substitutions["configdir"] = global_config.config_dir

        config = pungi.util.load_config(self.config)

        for f in config.opened_files:
            # apply substitutions
            fill_in_config_file(f, substitutions)

        self.status = Status.STARTED
        self.path = get_compose_dir(
            os.path.join(global_config.target, "parts"),
            config,
            compose_type=global_config.compose_type,
            compose_label=global_config.label,
        )
        self.log_file = os.path.join(global_config.target, "logs", "%s.log" % self.name)
        log.info("Starting %s in %s", self.name, self.path)

    def get_cmd(self, global_config):
        cmd = ["pungi-koji", "--config", self.config, "--compose-dir", self.path]
        cmd.append("--%s" % global_config.compose_type)
        if global_config.label:
            cmd.extend(["--label", global_config.label])
        for phase in self.just_phase:
            cmd.extend(["--just-phase", phase])
        for phase in self.skip_phase:
            cmd.extend(["--skip-phase", phase])
        if global_config.old_compose:
            cmd.extend(
                ["--old-compose", os.path.join(global_config.old_compose, "parts")]
            )
        if global_config.event:
            cmd.extend(["--koji-event", str(global_config.event)])
        if global_config.extra_args:
            cmd.extend(global_config.extra_args)
        cmd.extend(["--no-latest-link"])
        return cmd

    @classmethod
    def from_config(cls, config, section, config_dir):
        part = cls(
            name=section,
            config=os.path.join(config_dir, config.get(section, "config")),
            just_phase=_safe_get_list(config, section, "just_phase", []),
            skip_phase=_safe_get_list(config, section, "skip_phase", []),
            dependencies=_safe_get_list(config, section, "depends_on", []),
        )
        if config.has_option(section, "failable"):
            part.failable = config.getboolean(section, "failable")
        return part
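
from_config turns one INI section into a part, using the _safe_get_list helper defined just below. A hypothetical section, fed through configparser so the resulting object is visible (section name and values are invented):

import configparser

conf = configparser.RawConfigParser()
conf.read_string(
    "[server]\n"
    "config = server.conf\n"
    "depends_on = base, modules\n"
    "just_phase = createrepo\n"
    "failable = true\n"
)
part = ComposePart.from_config(conf, "server", "/tmp/config")
# part.blocked_on == {"base", "modules"}, part.failable is True,
# part.status starts as WAITING because the part has dependencies.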
def _safe_get_list(config, section, option, default=None):
    """Get a value from config parser. The result is split into a list on
    commas or spaces, and `default` is returned if the key does not exist.
    """
    if config.has_option(section, option):
        value = config.get(section, option)
        return [x.strip() for x in re.split(r"[, ]+", value) if x]
    return default
def fill_in_config_file(fp, substs):
    """Templating function. It works with Jinja2 style placeholders such as
    {{foo}}. Whitespace around the key name is fine. The file is modified in place.

    :param fp string: path to the file to process
    :param substs dict: a mapping for values to put into the file
    """

    def repl(match):
        try:
            return substs[match.group(1)]
        except KeyError as exc:
            raise RuntimeError(
                "Unknown placeholder %s in %s" % (exc, os.path.basename(fp))
            )

    with open(fp, "r") as f:
        contents = re.sub(r"{{ *([a-zA-Z-_]+) *}}", repl, f.read())
    with open(fp, "w") as f:
        f.write(contents)
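
A worked example of the substitution, with invented file content:

# Given a config file containing:
#     target = {{ configdir }}/output
#     old = {{ part-base }}
# and substitutions built by setup_start, e.g.:
substs = {"configdir": "/work/config", "part-base": "/composes/parts/base"}
# fill_in_config_file rewrites the file in place to:
#     target = /work/config/output
#     old = /composes/parts/base
# An unknown placeholder raises RuntimeError instead of passing through silently.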
def start_part(global_config, parts, part):
    part.setup_start(global_config, parts)
    fh = open(part.log_file, "w")
    cmd = part.get_cmd(global_config)
    log.debug("Running command %r", " ".join(shlex_quote(x) for x in cmd))
    return subprocess.Popen(cmd, stdout=fh, stderr=subprocess.STDOUT)


def handle_finished(global_config, linker, parts, proc, finished_part):
    finished_part.refresh_status()
    log.info("%s finished with status %s", finished_part, finished_part.status)
    if proc.returncode == 0:
        # Success, unblock other parts...
        for part in parts.values():
            part.unblock_on(finished_part.name)
        # ...and link the results into final destination.
        copy_part(global_config, linker, finished_part)
        update_metadata(global_config, finished_part)
    else:
        # Failure, other stuff may be blocked.
        log.info("See details in %s", finished_part.log_file)
        block_on(parts, finished_part.name)


def copy_part(global_config, linker, part):
    c = productmd.Compose(part.path)
    for variant in c.info.variants:
        data_path = os.path.join(part.path, "compose", variant)
        link = os.path.join(global_config.target, "compose", variant)
        log.info("Hardlinking content %s -> %s", data_path, link)
        hardlink_dir(linker, data_path, link)


def hardlink_dir(linker, srcdir, dstdir):
    for root, dirs, files in os.walk(srcdir):
        root = os.path.relpath(root, srcdir)
        for f in files:
            src = os.path.normpath(os.path.join(srcdir, root, f))
            dst = os.path.normpath(os.path.join(dstdir, root, f))
            linker.queue_put((src, dst))
def update_metadata(global_config, part):
    part_metadata_dir = os.path.join(part.path, "compose", "metadata")
    final_metadata_dir = os.path.join(global_config.target, "compose", "metadata")
    for f in os.listdir(part_metadata_dir):
        # Load the metadata
        with open(os.path.join(part_metadata_dir, f)) as fh:
            part_metadata = json.load(fh)
        final_metadata = os.path.join(final_metadata_dir, f)
        if os.path.exists(final_metadata):
            # We already have this file, will need to merge.
            merge_metadata(final_metadata, part_metadata)
        else:
            # A new file, just copy it.
            copy_metadata(global_config, final_metadata, part_metadata)


def copy_metadata(global_config, final_metadata, source):
    """Copy file to final location, but update compose information."""
    with open(
        os.path.join(global_config.target, "compose/metadata/composeinfo.json")
    ) as f:
        composeinfo = json.load(f)
    try:
        source["payload"]["compose"].update(composeinfo["payload"]["compose"])
    except KeyError:
        # No [payload][compose], probably OSBS metadata
        pass
    with open(final_metadata, "w") as f:
        json.dump(source, f, indent=2, sort_keys=True)


def merge_metadata(final_metadata, source):
    with open(final_metadata) as f:
        metadata = json.load(f)

    try:
        key = {
            "productmd.composeinfo": "variants",
            "productmd.modules": "modules",
            "productmd.images": "images",
            "productmd.rpms": "rpms",
        }[source["header"]["type"]]
        # TODO what if multiple parts create images for the same variant
        metadata["payload"][key].update(source["payload"][key])
    except KeyError:
        # OSBS metadata, merge whole file
        metadata.update(source)
    with open(final_metadata, "w") as f:
        json.dump(metadata, f, indent=2, sort_keys=True)
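
To make the merge concrete, a sketch with invented payloads: two parts both emitting rpms.json are combined per the payload key looked up from the header type:

final = {"header": {"type": "productmd.rpms"}, "payload": {"rpms": {"Server": {}}}}
part = {"header": {"type": "productmd.rpms"}, "payload": {"rpms": {"Client": {}}}}
# merge_metadata maps "productmd.rpms" -> "rpms" and then runs
# final["payload"]["rpms"].update(part["payload"]["rpms"]),
# so the merged file keeps both the Server and the Client entries.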
def block_on(parts, name):
    """Part ``name`` failed, mark everything depending on it as blocked."""
    for part in parts.values():
        if name in part.blocked_on:
            log.warning("%s is blocked now and will not run", part)
            part.status = Status.BLOCKED
            block_on(parts, part.name)


def check_finished_processes(processes):
    """Walk through all active processes and check if something finished."""
    for proc in processes.keys():
        proc.poll()
        if proc.returncode is not None:
            yield proc, processes[proc]


def run_all(global_config, parts):
    # Mapping subprocess.Popen -> ComposePart
    processes = dict()
    remaining = set(p.name for p in parts.values() if not p.is_finished())

    with linker_pool("hardlink") as linker:
        while remaining or processes:
            update_status(global_config, parts)

            for proc, part in check_finished_processes(processes):
                del processes[proc]
                handle_finished(global_config, linker, parts, proc, part)

            # Start new available processes.
            for name in list(remaining):
                part = parts[name]
                # Start all ready parts
                if part.status == Status.READY:
                    remaining.remove(name)
                    processes[start_part(global_config, parts, part)] = part
                # Remove blocked parts from todo list
                elif part.status == Status.BLOCKED:
                    remaining.remove(part.name)

            # Wait for any child process to finish if there is any.
            if processes:
                pid, reason = os.wait()
                for proc in processes.keys():
                    # Set the return code for process that we caught by os.wait().
                    # Calling poll() on it would not set the return code properly
                    # since the value was already consumed by os.wait().
                    if proc.pid == pid:
                        proc.returncode = (reason >> 8) & 0xFF

    log.info("Waiting for linking to finish...")
    return update_status(global_config, parts)
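
One detail worth spelling out: os.wait() returns a packed 16-bit status, so the shift above recovers the exit code. An illustration of POSIX wait-status semantics, not anything project-specific:

# For a child that called sys.exit(3), os.wait() yields status == 0x0300:
#   (status >> 8) & 0xFF  -> 3, the exit code (what run_all stores in returncode)
#   status & 0x7F         -> 0, the number of the killing signal, if any
# The stdlib equivalent of the shift is os.WEXITSTATUS(status).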
def get_target_dir(config, compose_info, label, reldir=""):
    """Find directory where this compose will be.

    @param reldir: if target path in config is relative, it will be resolved
                   against this directory
    """
    dir = os.path.realpath(os.path.join(reldir, config.get("general", "target")))
    target_dir = get_compose_dir(
        dir,
        compose_info,
        compose_type=config.get("general", "compose_type"),
        compose_label=label,
    )
    return target_dir


def setup_logging(debug=False):
    FORMAT = "%(asctime)s: %(levelname)s: %(message)s"
    level = logging.DEBUG if debug else logging.INFO
    kobo.log.add_stderr_logger(log, log_level=level, format=FORMAT)
    log.setLevel(level)


def compute_status(statuses):
    if any(map(lambda x: x[0] in ("STARTED", "WAITING"), statuses)):
        # If there is anything still running or waiting to start, the whole is
        # still running.
        return "STARTED"
    elif any(map(lambda x: x[0] in ("DOOMED", "BLOCKED") and not x[1], statuses)):
        # If any required part is doomed or blocked, the whole is doomed
        return "DOOMED"
    elif all(map(lambda x: x[0] == "FINISHED", statuses)):
        # If all parts are complete, the whole is complete
        return "FINISHED"
    else:
        return "FINISHED_INCOMPLETE"
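
A quick illustration of the tuples compute_status receives; each is (status, failable):

compute_status({("FINISHED", False), ("DOOMED", True)})
# -> "FINISHED_INCOMPLETE": the doomed part was failable
compute_status({("FINISHED", False), ("DOOMED", False)})
# -> "DOOMED": a required part failed
compute_status({("FINISHED", False), ("STARTED", False)})
# -> "STARTED": something is still running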
def update_status(global_config, parts):
    log.debug("Updating status metadata")
    metadata = {}
    statuses = set()
    for part in parts.values():
        metadata[part.name] = {"status": part.status, "path": part.path}
        statuses.add((part.status, part.failable))
    metadata_path = os.path.join(
        global_config.target, "compose", "metadata", "parts.json"
    )
    with open(metadata_path, "w") as fh:
        json.dump(metadata, fh, indent=2, sort_keys=True, separators=(",", ": "))

    status = compute_status(statuses)
    log.info("Overall status is %s", status)
    with open(os.path.join(global_config.target, "STATUS"), "w") as fh:
        fh.write(status)

    return status != "DOOMED"


def prepare_compose_dir(config, args, main_config_file, compose_info):
    if not hasattr(args, "compose_path"):
        # Creating a brand new compose
        target_dir = get_target_dir(
            config, compose_info, args.label, reldir=os.path.dirname(main_config_file)
        )
        for dir in ("logs", "parts", "compose/metadata", "work/global"):
            try:
                os.makedirs(os.path.join(target_dir, dir))
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise
        with open(os.path.join(target_dir, "STATUS"), "w") as fh:
            fh.write("STARTED")
        # Copy initial composeinfo for new compose
        shutil.copy(
            os.path.join(target_dir, "work/global/composeinfo-base.json"),
            os.path.join(target_dir, "compose/metadata/composeinfo.json"),
        )
    else:
        # Restarting a particular compose
        target_dir = args.compose_path

    return target_dir


def load_parts_metadata(global_config):
    parts_metadata = os.path.join(global_config.target, "compose/metadata/parts.json")
    with open(parts_metadata) as f:
        return json.load(f)


def setup_for_restart(global_config, parts, to_restart):
    has_stuff_to_do = False
    metadata = load_parts_metadata(global_config)
    for key in metadata:
        # Update state to match what is on disk
        log.debug(
            "Reusing %s (%s) from %s",
            key,
            metadata[key]["status"],
            metadata[key]["path"],
        )
        parts[key].status = metadata[key]["status"]
        parts[key].path = metadata[key]["path"]
    for key in to_restart:
        # Set restarted parts to run again
        parts[key].status = Status.WAITING
        parts[key].path = None

    for key in to_restart:
        # Remove blockers that are already finished
        for blocker in list(parts[key].blocked_on):
            if parts[blocker].is_finished():
                parts[key].blocked_on.discard(blocker)
        if not parts[key].blocked_on:
log.debug("Part %s in not blocked", key)
            # Nothing blocks it; let's go
            parts[key].status = Status.READY
            has_stuff_to_do = True

    if not has_stuff_to_do:
        raise RuntimeError("All restarted parts are blocked. Nothing to do.")


def run_kinit(config):
    if not config.getboolean("general", "kerberos"):
        return

    keytab = config.get("general", "kerberos_keytab")
    principal = config.get("general", "kerberos_principal")

    fd, fname = tempfile.mkstemp(prefix="krb5cc_pungi-orchestrate_")
    os.close(fd)
    os.environ["KRB5CCNAME"] = fname
    shortcuts.run(["kinit", "-k", "-t", keytab, principal])
    log.debug("Created a kerberos ticket for %s", principal)

    atexit.register(os.remove, fname)


def get_compose_data(compose_path):
    try:
        compose = productmd.compose.Compose(compose_path)
        data = {
            "compose_id": compose.info.compose.id,
            "compose_date": compose.info.compose.date,
            "compose_type": compose.info.compose.type,
            "compose_respin": str(compose.info.compose.respin),
            "compose_label": compose.info.compose.label,
            "release_id": compose.info.release_id,
            "release_name": compose.info.release.name,
            "release_short": compose.info.release.short,
            "release_version": compose.info.release.version,
            "release_type": compose.info.release.type,
            "release_is_layered": compose.info.release.is_layered,
        }
        if compose.info.release.is_layered:
            data.update(
                {
                    "base_product_name": compose.info.base_product.name,
                    "base_product_short": compose.info.base_product.short,
                    "base_product_version": compose.info.base_product.version,
                    "base_product_type": compose.info.base_product.type,
                }
            )
        return data
    except Exception:
        return {}


def get_script_env(compose_path):
    env = os.environ.copy()
    env["COMPOSE_PATH"] = compose_path
    for key, value in get_compose_data(compose_path).items():
        if isinstance(value, bool):
            env[key.upper()] = "YES" if value else ""
        else:
            env[key.upper()] = str(value) if value else ""
    return env
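
The resulting environment for a hypothetical compose path (values invented; note how booleans are encoded):

env = get_script_env("/composes/F-Rawhide-20200101.n.0")
# env["COMPOSE_PATH"]       == "/composes/F-Rawhide-20200101.n.0"
# env["COMPOSE_ID"]         == "F-Rawhide-20200101.n.0"  (from compose metadata)
# env["RELEASE_IS_LAYERED"] == "YES" or ""  -- booleans are not str()-ed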
def run_scripts(prefix, compose_dir, scripts):
    env = get_script_env(compose_dir)
    for idx, script in enumerate(scripts.strip().splitlines()):
        command = script.strip()
        logfile = os.path.join(compose_dir, "logs", "%s%s.log" % (prefix, idx))
        log.debug("Running command: %r", command)
        log.debug("See output in %s", logfile)
        shortcuts.run(command, env=env, logfile=logfile)


def try_translate_path(parts, path):
    translation = []
    for part in parts.values():
        conf = pungi.util.load_config(part.config)
        translation.extend(conf.get("translate_paths", []))
    return pungi.util.translate_path_raw(translation, path)


def send_notification(compose_dir, command, parts):
    if not command:
        return
    from pungi.notifier import PungiNotifier

    data = get_compose_data(compose_dir)
    data["location"] = try_translate_path(parts, compose_dir)
    notifier = PungiNotifier([command])
    with open(os.path.join(compose_dir, "STATUS")) as f:
        status = f.read().strip()
    notifier.send("status-change", workdir=compose_dir, status=status, **data)


def setup_progress_monitor(global_config, parts):
"""Update configuration so that each part send notifications about its
    progress to the orchestrator.

    There is a file to which the notification is written. The orchestrator is
    reading it and mapping the entries to particular parts. The path to this
    file is stored in an environment variable.
    """
    tmp_file = tempfile.NamedTemporaryFile(prefix="pungi-progress-monitor_")
    os.environ["_PUNGI_ORCHESTRATOR_PROGRESS_MONITOR"] = tmp_file.name
    atexit.register(os.remove, tmp_file.name)

    global_config.extra_args.append(
        "--notification-script=pungi-notification-report-progress"
    )

    def reader():
        while True:
            line = tmp_file.readline()
            if not line:
                time.sleep(0.1)
                continue
            path, msg = line.split(":", 1)
            for part in parts:
                if parts[part].path == os.path.dirname(path):
                    log.debug("%s: %s", part, msg.strip())
                    break

    monitor = threading.Thread(target=reader)
    monitor.daemon = True
    monitor.start()
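
The monitor assumes each line in the shared file has the form "<path>:<message>", where the directory of the path identifies the part. A sketch with an invented part path:

# If parts["server"].path == "/composes/parts/Server-20200101.n.0", a line like
#   /composes/parts/Server-20200101.n.0/status:phase_start createrepo
# is split on the first ":"; os.path.dirname() of the left side matches the
# part's compose directory, so the message is logged as coming from "server".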
def run(work_dir, main_config_file, args):
    config_dir = os.path.join(work_dir, "config")
    shutil.copytree(os.path.dirname(main_config_file), config_dir)

    # Read main config
    parser = configparser.RawConfigParser(
        defaults={
            "kerberos": "false",
            "pre_compose_script": "",
            "post_compose_script": "",
            "notification_script": "",
        }
    )
    parser.read(main_config_file)

    # Create kerberos ticket
    run_kinit(parser)

    compose_info = dict(parser.items("general"))
    compose_type = parser.get("general", "compose_type")

    target_dir = prepare_compose_dir(parser, args, main_config_file, compose_info)
    kobo.log.add_file_logger(log, os.path.join(target_dir, "logs", "orchestrator.log"))
    log.info("Composing %s", target_dir)

    run_scripts("pre_compose_", target_dir, parser.get("general", "pre_compose_script"))

    old_compose = find_old_compose(
        os.path.dirname(target_dir),
        compose_info["release_short"],
        compose_info["release_version"],
        "",
    )
    if old_compose:
        log.info("Reusing old compose %s", old_compose)

    global_config = Config(
        target=target_dir,
        compose_type=compose_type,
        label=args.label,
        old_compose=old_compose,
        config_dir=os.path.dirname(main_config_file),
        event=args.koji_event,
        extra_args=_safe_get_list(parser, "general", "extra_args"),
    )

    if not global_config.event and parser.has_option("general", "koji_profile"):
        koji_wrapper = KojiWrapper(parser.get("general", "koji_profile"))
        event_file = os.path.join(global_config.target, "work/global/koji-event")
        result = get_koji_event_raw(koji_wrapper, None, event_file)
        global_config = global_config._replace(event=result["id"])

    parts = {}
    for section in parser.sections():
        if section == "general":
            continue
        parts[section] = ComposePart.from_config(parser, section, config_dir)

    if hasattr(args, "part"):
        setup_for_restart(global_config, parts, args.part)

    setup_progress_monitor(global_config, parts)

    send_notification(target_dir, parser.get("general", "notification_script"), parts)

    retcode = run_all(global_config, parts)

    if retcode:
        # Only run the script if we are not doomed.
        run_scripts(
            "post_compose_", target_dir, parser.get("general", "post_compose_script")
        )

    send_notification(target_dir, parser.get("general", "notification_script"), parts)

    return retcode


def parse_args(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--koji-event", metavar="ID", type=parse_koji_event)
    subparsers = parser.add_subparsers()
    start = subparsers.add_parser("start")
    start.add_argument("config", metavar="CONFIG")
    start.add_argument("--label")

    restart = subparsers.add_parser("restart")
    restart.add_argument("config", metavar="CONFIG")
    restart.add_argument("compose_path", metavar="COMPOSE_PATH")
    restart.add_argument(
        "part", metavar="PART", nargs="*", help="which parts to restart"
    )
    restart.add_argument("--label")

    return parser.parse_args(argv)


def main(argv=None):
    args = parse_args(argv)
    setup_logging(args.debug)

    main_config_file = os.path.abspath(args.config)

    with temp_dir() as work_dir:
        try:
            if not run(work_dir, main_config_file, args):
                sys.exit(1)
        except Exception:
            log.exception("Unhandled exception!")
            sys.exit(1)
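
Putting the pieces together, a hypothetical invocation; the config layout is invented to match the parsing above, and pungi-orchestrate is the entry point registered in setup.py:

# main.ini:
#   [general]
#   target = /composes
#   compose_type = nightly
#   release_short = F
#   release_version = Rawhide
#
#   [base]
#   config = base.conf
#
#   [server]
#   config = server.conf
#   depends_on = base
#
# Start a new compose:       pungi-orchestrate --debug start main.ini
# Restart one failed part:   pungi-orchestrate restart main.ini \
#                                /composes/F-Rawhide-20200101.n.0 server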
@@ -1,8 +1,6 @@
# Some packages must be installed via dnf/yum first, see doc/contributing.rst
dict.sorted
dogpile.cache
flufl.lock ; python_version >= '3.0'
flufl.lock < 3.0 ; python_version <= '2.7'
funcsigs
jsonschema
kobo
setup.py
@@ -5,9 +5,14 @@
import os
import glob

import distutils.command.sdist
from setuptools import setup


# override default tarball format with bzip2
distutils.command.sdist.sdist.default_format = {"posix": "bztar"}


# recursively scan for python modules to be included
package_root_dirs = ["pungi", "pungi_utils"]
packages = set()

@@ -20,7 +25,7 @@ packages = sorted(packages)

setup(
    name="pungi",
    version="4.5.0",
    version="4.3.7",
    description="Distribution compose tool",
    url="https://pagure.io/pungi",
    author="Dennis Gilmore",

@@ -32,16 +37,15 @@ setup(
            "comps_filter = pungi.scripts.comps_filter:main",
            "pungi = pungi.scripts.pungi:main",
            "pungi-create-unified-isos = pungi.scripts.create_unified_isos:main",
            "pungi-fedmsg-notification = pungi.scripts.fedmsg_notification:main",
            "pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
            "pungi-make-ostree = pungi.ostree:main",
            "pungi-notification-report-progress = pungi.scripts.report_progress:main",
            "pungi-orchestrate = pungi_utils.orchestrator:main",
            "pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main",  # noqa: E501
            "pungi-koji = pungi.scripts.pungi_koji:cli_main",
            "pungi-gather = pungi.scripts.pungi_gather:cli_main",
            "pungi-config-dump = pungi.scripts.config_dump:cli_main",
            "pungi-config-validate = pungi.scripts.config_validate:cli_main",
            "pungi-cache-cleanup = pungi.scripts.cache_cleanup:main",
            "pungi-gather-modules = pungi.scripts.gather_modules:cli_main",
            "pungi-gather-rpms = pungi.scripts.gather_rpms:cli_main",
            "pungi-generate-packages-json = pungi.scripts.create_packages_json:cli_main",  # noqa: E501

@@ -50,7 +54,6 @@ setup(
    },
    scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving"],
    data_files=[
        ("/usr/lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
        ("/usr/share/pungi", glob.glob("share/*.xsl")),
        ("/usr/share/pungi", glob.glob("share/*.ks")),
        ("/usr/share/pungi", glob.glob("share/*.dtd")),
@@ -108,7 +108,6 @@
      <groupid>core</groupid>
    </grouplist>
    <optionlist>
      <groupid arch="x86_64">standard</groupid>
    </optionlist>
  </environment>
@@ -21,15 +21,6 @@ from pungi import paths, checks
from pungi.module_util import Modulemd


GIT_WITH_CREDS = [
    "git",
    "-c",
    "credential.useHttpPath=true",
    "-c",
    "credential.helper=!ch",
]


class BaseTestCase(unittest.TestCase):
    def assertFilesEqual(self, fn1, fn2):
        with open(fn1, "rb") as f1:

@@ -167,20 +158,6 @@ class IterableMock(mock.Mock):
        return iter([])


class FSKojiDownloader(object):
    """Mock for KojiDownloadProxy that checks provided path."""

    def get_file(self, path, validator=None):
        return path if os.path.isfile(path) else None


class DummyKojiDownloader(object):
    """Mock for KojiDownloadProxy that always finds the file in original location."""

    def get_file(self, path, validator=None):
        return path


class DummyCompose(object):
    def __init__(self, topdir, config):
        self.supported = True

@@ -255,8 +232,6 @@ class DummyCompose(object):
        self.cache_region = None
        self.containers_metadata = {}
        self.load_old_compose_config = mock.Mock(return_value=None)
        self.koji_downloader = DummyKojiDownloader()
        self.koji_downloader.path_prefix = "/prefix"

    def setup_optional(self):
        self.all_variants["Server-optional"] = MockVariant(

@@ -297,7 +272,7 @@ class DummyCompose(object):
        return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=self.topdir)


def touch(path, content=None, mode=None):
def touch(path, content=None):
"""Helper utility that creates an dummy file in given location. Directories
|
||||
will be created."""
    content = content or (path + "\n")

@@ -309,8 +284,6 @@ def touch(path, content=None, mode=None):
        content = content.encode()
    with open(path, "wb") as f:
        f.write(content)
    if mode:
        os.chmod(path, mode)
    return path
@@ -1209,7 +1209,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "lorax",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "runroot_weights": {"buildinstall": 123},
            },
        )

@@ -1309,7 +1308,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "lorax_use_koji_plugin": True,
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "runroot_weights": {"buildinstall": 123},
            },
        )

@@ -1414,7 +1412,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "buildinstall",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )

@@ -1503,7 +1500,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "buildinstall",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
            },
        )

@@ -1546,7 +1542,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "lorax",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
            },
        )

@@ -1596,7 +1591,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "lorax",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
            },
        )

@@ -1669,7 +1663,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "lorax",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
            },
        )

@@ -1708,7 +1701,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "lorax",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "runroot_weights": {"buildinstall": 123},
                "buildinstall_topdir": "/buildinstall_topdir",
            },

@@ -1818,7 +1810,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
                "buildinstall_method": "lorax",
                "runroot_tag": "rrt",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
@@ -656,7 +656,6 @@ class ComposeTestCase(unittest.TestCase):
        mocked_requests.post.assert_called_once_with(
            "https://cts.localhost.tld/api/1/composes/",
            auth=mock.ANY,
            data=None,
            json=expected_json,
        )

@@ -795,16 +794,12 @@ class TracebackTest(unittest.TestCase):
        shutil.rmtree(self.tmp_dir)
        self.patcher.stop()

    def assertTraceback(self, filename, show_locals=True):
    def assertTraceback(self, filename):
        self.assertTrue(
            os.path.isfile("%s/logs/global/%s.global.log" % (self.tmp_dir, filename))
        )
        self.assertEqual(
            self.Traceback.mock_calls,
            [
                mock.call(show_locals=show_locals),
                mock.call(show_locals=show_locals).get_traceback(),
            ],
            self.Traceback.mock_calls, [mock.call(), mock.call().get_traceback()]
        )

    def test_traceback_default(self):

@@ -829,8 +824,8 @@ class RetryRequestTest(unittest.TestCase):
        self.assertEqual(
            mocked_requests.mock_calls,
            [
                mock.call.post(url, data=None, json=None, auth=None),
                mock.call.post(url, data=None, json=None, auth=None),
                mock.call.post(url, json=None, auth=None),
                mock.call.post(url, json=None, auth=None),
            ],
        )
        self.assertEqual(rv.status_code, 200)

@@ -846,5 +841,5 @@

        self.assertEqual(
            mocked_requests.mock_calls,
            [mock.call.post(url, data=None, json=None, auth=None)],
            [mock.call.post(url, json=None, auth=None)],
        )
@@ -440,7 +440,7 @@ class LiveMediaConfigTestCase(ConfigTestCase):
            live_media_version="Rawhide",
        )

        resolve_git_url.side_effect = lambda x, _helper: x.replace("HEAD", "CAFE")
        resolve_git_url.side_effect = lambda x: x.replace("HEAD", "CAFE")

        self.assertValidation(cfg)
        self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
@@ -57,12 +57,7 @@ class TestPackagesJson(TestCase):
            'pungi.scripts.create_packages_json.tempfile.NamedTemporaryFile',
        ) as mock_tempfile:
            mock_tempfile.return_value.__enter__.return_value.name = 'tmpfile'
            packages_generator = PackagesGenerator(
                variants=[],
                excluded_packages=[],
                included_packages=[],
            )
            file_name = packages_generator.get_remote_file_content(
            file_name = PackagesGenerator.get_remote_file_content(
                file_url='fakeurl')
            mock_requests_get.assert_called_once_with(url='fakeurl')
            mock_tempfile.assert_called_once_with(delete=False)
@@ -552,7 +552,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                "release_version": "1.0",
                "runroot_tag": "f25-build",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
        cmd = {

@@ -634,7 +633,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                "release_version": "1.0",
                "runroot_tag": "f25-build",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "create_jigdo": False,
                "runroot_weights": {"createiso": 123},
            },

@@ -719,7 +717,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                "buildinstall_method": "lorax",
                "runroot_tag": "f25-build",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
        cmd = {

@@ -810,7 +807,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                "release_version": "1.0",
                "runroot_tag": "f25-build",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
        cmd = {

@@ -843,7 +839,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                "release_version": "1.0",
                "runroot_tag": "f25-build",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "failable_deliverables": [("^.*$", {"*": "iso"})],
            },
        )

@@ -886,7 +881,6 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                "release_version": "1.0",
                "runroot_tag": "f25-build",
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "failable_deliverables": [("^.*$", {"*": "iso"})],
            },
        )
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-

from unittest import mock
from parameterized import parameterized

import os
from six.moves import StringIO

@@ -392,27 +391,3 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
            ),
        ]
    )

    @parameterized.expand(
        [("644", 0o644), ("664", 0o664), ("666", 0o666), ("2644", 0o2644)]
    )
    def test_get_perms_non_executable(self, test_name, mode):
        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
        self.assertEqual(createiso._get_perms(path), 0o444)

    @parameterized.expand(
        [
            ("544", 0o544),
            ("554", 0o554),
            ("555", 0o555),
            ("744", 0o744),
            ("755", 0o755),
            ("774", 0o774),
            ("775", 0o775),
            ("777", 0o777),
            ("2775", 0o2775),
        ]
    )
    def test_get_perms_executable(self, test_name, mode):
        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
        self.assertEqual(createiso._get_perms(path), 0o555)
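
The two parameterized tests pin down the normalization rule for _get_perms: every non-executable mode collapses to 0o444 and every executable one to 0o555. A sketch of the rule they imply (not the actual implementation):

import os
import stat

def normalized_perms(path):
    # World-readable always; the executable bit is all-or-nothing.
    return 0o555 if os.stat(path).st_mode & stat.S_IXUSR else 0o444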
@@ -6,7 +6,6 @@ from pathlib import Path
from pyfakefs.fake_filesystem_unittest import TestCase

from pungi.scripts.gather_rpms import search_rpms, copy_rpms, Package
from productmd.common import parse_nvra

PATH_TO_REPOS = '/path/to/repos'
MODULES_YAML_GZ = 'modules.yaml.gz'

@@ -42,39 +41,37 @@ class TestGatherRpms(TestCase):

    def test_gather_rpms(self):
        self.assertEqual(
            [Package(nvra=parse_nvra('libvirt-6.0.0-28.module_'
                                     'el8.3.0+555+a55c8938.i686'),
            [Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686',
                     path=Path(
                         f'{PATH_TO_REPOS}/powertools/Packages/'
                         f'libvirt-6.0.0-28.module_el'
                         f'8.3.0+555+a55c8938.i686.rpm'
                     )),
             Package(nvra=parse_nvra('libgit2-devel-0.26.8-2.el8.x86_64'),
             Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64',
                     path=Path(
                         f'{PATH_TO_REPOS}/powertools/Packages/'
                         f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
                     )),
             Package(nvra=parse_nvra('xalan-j2-2.7.1-38.module_el'
                                     '8.0.0+30+832da3a1.noarch'),
             Package(nvra='xalan-j2-2.7.1-38.module_el'
                          '8.0.0+30+832da3a1.noarch',
                     path=Path(
                         f'{PATH_TO_REPOS}/powertools/Packages/'
                         f'xalan-j2-2.7.1-38.module_el'
                         f'8.0.0+30+832da3a1.noarch.rpm'
                     )),
             Package(nvra=parse_nvra('bnd-maven-plugin-3.5.0-4.module_el'
                                     '8.0.0+30+832da3a1.noarch'),
             Package(nvra='bnd-maven-plugin-3.5.0-4.module_el'
                          '8.0.0+30+832da3a1.noarch',
                     path=Path(
                         '/path/to/repos/appstream/Packages/'
                         'bnd-maven-plugin-3.5.0-4.module_el'
                         '8.0.0+30+832da3a1.noarch.rpm'
                     )),
             Package(nvra=parse_nvra('OpenEXR-devel-2.2.0-11.el8.i686'),
             Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686',
                     path=Path(
                         f'{PATH_TO_REPOS}/appstream/Packages/'
                         f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
                     )),
             Package(nvra=parse_nvra('mingw-binutils-generic-'
                                     '2.30-1.el8.x86_64'),
             Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64',
                     path=Path(
                         f'{PATH_TO_REPOS}/appstream/Packages/'
                         f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'

@@ -87,45 +84,42 @@ class TestGatherRpms(TestCase):
        target_path = Path('/mnt/koji')
        packages = [

            Package(nvra=parse_nvra('libvirt-6.0.0-28.module_'
                                    'el8.3.0+555+a55c8938.i686'),
            Package(nvra='libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686',
                    path=Path(
                        f'{PATH_TO_REPOS}/powertools/Packages/'
                        f'libvirt-6.0.0-28.module_el'
                        f'8.3.0+555+a55c8938.i686.rpm'
                    )),
            Package(nvra=parse_nvra('libgit2-devel-0.26.8-2.el8.x86_64'),
            Package(nvra='libgit2-devel-0.26.8-2.el8.x86_64',
                    path=Path(
                        f'{PATH_TO_REPOS}/powertools/Packages/'
                        f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
                    )),
            Package(nvra=parse_nvra('xalan-j2-2.7.1-38.module_'
                                    'el8.0.0+30+832da3a1.noarch'),
            Package(nvra='xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch',
                    path=Path(
                        f'{PATH_TO_REPOS}/powertools/Packages/'
                        f'xalan-j2-2.7.1-38.module_el'
                        f'8.0.0+30+832da3a1.noarch.rpm'
                    )),
            Package(nvra=parse_nvra('bnd-maven-plugin-3.5.0-4.module_el'
                                    '8.0.0+30+832da3a1.noarch'),
            Package(nvra='bnd-maven-plugin-3.5.0-4.module_el'
                         '8.0.0+30+832da3a1.noarch',
                    path=Path(
                        '/path/to/repos/appstream/Packages/'
                        'bnd-maven-plugin-3.5.0-4.module_el'
                        '8.0.0+30+832da3a1.noarch.rpm'
                    )),
            Package(nvra=parse_nvra('OpenEXR-devel-2.2.0-11.el8.i686'),
            Package(nvra='OpenEXR-devel-2.2.0-11.el8.i686',
                    path=Path(
                        f'{PATH_TO_REPOS}/appstream/Packages/'
                        f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
                    )),
            Package(nvra=parse_nvra('mingw-binutils-generic-'
                                    '2.30-1.el8.x86_64'),
            Package(nvra='mingw-binutils-generic-2.30-1.el8.x86_64',
                    path=Path(
                        f'{PATH_TO_REPOS}/appstream/Packages/'
                        f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
                    ))
        ]
        copy_rpms(packages, target_path, [])
        copy_rpms(packages, target_path)

        self.assertCountEqual([
            'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm',
@@ -122,7 +122,6 @@ class ImageContainerThreadTest(helpers.PungiTestCase):
            self.topdir,
            {
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "translate_paths": [(self.topdir, "http://root")],
            },
        )
@@ -35,7 +35,6 @@ class TestImageBuildPhase(PungiTestCase):
            {
                "image_build": {"^Client|Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )

@@ -46,7 +45,7 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)
        client_args = {
            "original_image_conf": original_image_conf,
            "image_conf": {

@@ -128,7 +127,6 @@ class TestImageBuildPhase(PungiTestCase):
                "image_build_version": "Rawhide",
                "image_build": {"^Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )

@@ -139,7 +137,7 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)
        server_args = {
            "original_image_conf": original_image_conf,
            "image_conf": {

@@ -190,7 +188,6 @@ class TestImageBuildPhase(PungiTestCase):
                "image_build_target": "f24",
                "image_build": {"^Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )

@@ -199,7 +196,7 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)
        server_args = {
            "original_image_conf": original_image_conf,
            "image_conf": {

@@ -254,7 +251,6 @@ class TestImageBuildPhase(PungiTestCase):
                ]
            },
            "koji_profile": "koji",
            "koji_cache": "/tmp",
            },
        )

@@ -265,8 +261,8 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_not_called()
        phase.pool.queue_put.assert_not_called()
        self.assertFalse(phase.pool.add.called)
        self.assertFalse(phase.pool.queue_put.called)

    @mock.patch("pungi.phases.image_build.ThreadPool")
    def test_image_build_set_install_tree(self, ThreadPool):

@@ -290,7 +286,6 @@ class TestImageBuildPhase(PungiTestCase):
            {
                "image_build": {"^Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
        compose.setup_optional()

@@ -302,9 +297,9 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)

        phase.pool.queue_put.assert_called_once()
        self.assertTrue(phase.pool.queue_put.called_once)
        args, kwargs = phase.pool.queue_put.call_args
        self.assertEqual(args[0][0], compose)
        self.assertDictEqual(

@@ -358,7 +353,6 @@ class TestImageBuildPhase(PungiTestCase):
            {
                "image_build": {"^Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
                "translate_paths": [("/my", "http://example.com")],
            },
        )

@@ -370,9 +364,9 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)

        phase.pool.queue_put.assert_called_once()
        self.assertTrue(phase.pool.queue_put.called_once)
        args, kwargs = phase.pool.queue_put.call_args
        self.assertEqual(args[0][0], compose)
        self.assertDictEqual(

@@ -425,7 +419,6 @@ class TestImageBuildPhase(PungiTestCase):
            {
                "image_build": {"^Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
        compose.setup_optional()

@@ -437,9 +430,9 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)

        phase.pool.queue_put.assert_called_once()
        self.assertTrue(phase.pool.queue_put.called_once)
        args, kwargs = phase.pool.queue_put.call_args
        self.assertEqual(args[0][0], compose)
        self.assertDictEqual(

@@ -498,7 +491,6 @@ class TestImageBuildPhase(PungiTestCase):
            {
                "image_build": {"^Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )

@@ -509,9 +501,9 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)

        phase.pool.queue_put.assert_called_once()
        self.assertTrue(phase.pool.queue_put.called_once)
        args, kwargs = phase.pool.queue_put.call_args
        self.assertEqual(args[0][0], compose)
        self.assertDictEqual(

@@ -567,7 +559,6 @@ class TestImageBuildPhase(PungiTestCase):
                ]
            },
            "koji_profile": "koji",
            "koji_cache": "/tmp",
            },
        )

@@ -578,9 +569,9 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)

        phase.pool.queue_put.assert_called_once()
        self.assertTrue(phase.pool.queue_put.called_once)
        args, kwargs = phase.pool.queue_put.call_args
        self.assertEqual(
            args[0][1].get("image_conf", {}).get("image-build", {}).get("release"),

@@ -611,7 +602,6 @@ class TestImageBuildPhase(PungiTestCase):
                ]
            },
            "koji_profile": "koji",
            "koji_cache": "/tmp",
            },
        )

@@ -622,9 +612,9 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)

        phase.pool.queue_put.assert_called_once()
        self.assertTrue(phase.pool.queue_put.called_once)
        args, kwargs = phase.pool.queue_put.call_args
        self.assertEqual(
            args[0][1].get("image_conf", {}).get("image-build", {}).get("release"),

@@ -655,7 +645,6 @@ class TestImageBuildPhase(PungiTestCase):
                ]
            },
            "koji_profile": "koji",
            "koji_cache": "/tmp",
            },
        )

@@ -666,9 +655,9 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)

        phase.pool.queue_put.assert_called_once()
        self.assertTrue(phase.pool.queue_put.called_once)
        args, kwargs = phase.pool.queue_put.call_args
        self.assertTrue(args[0][1].get("scratch"))

@@ -692,7 +681,6 @@ class TestImageBuildPhase(PungiTestCase):
            {
                "image_build": {"^Server-optional$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
        compose.setup_optional()

@@ -704,7 +692,7 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)
        server_args = {
            "original_image_conf": original_image_conf,
            "image_conf": {

@@ -756,7 +744,6 @@ class TestImageBuildPhase(PungiTestCase):
            {
                "image_build": {"^Server$": [original_image_conf]},
                "koji_profile": "koji",
                "koji_cache": "/tmp",
            },
        )
        compose.setup_optional()

@@ -768,7 +755,7 @@ class TestImageBuildPhase(PungiTestCase):
        phase.run()

        # assert at least one thread was started
        phase.pool.add.assert_called()
        self.assertTrue(phase.pool.add.called)
        server_args = {
            "original_image_conf": original_image_conf,
            "image_conf": {

@@ -956,9 +943,7 @@ class TestCreateImageBuildThread(PungiTestCase):
    @mock.patch("pungi.phases.image_build.KojiWrapper")
    @mock.patch("pungi.phases.image_build.Linker")
    def test_process_handle_fail(self, Linker, KojiWrapper):
|
||||
compose = DummyCompose(
|
||||
self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
|
||||
)
|
||||
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
|
||||
pool = mock.Mock()
|
||||
cmd = {
|
||||
"image_conf": {
|
||||
|
@ -1015,9 +1000,7 @@ class TestCreateImageBuildThread(PungiTestCase):
|
|||
@mock.patch("pungi.phases.image_build.KojiWrapper")
|
||||
@mock.patch("pungi.phases.image_build.Linker")
|
||||
def test_process_handle_exception(self, Linker, KojiWrapper):
|
||||
compose = DummyCompose(
|
||||
self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
|
||||
)
|
||||
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
|
||||
pool = mock.Mock()
|
||||
cmd = {
|
||||
"image_conf": {
|
||||
|
@ -1063,9 +1046,7 @@ class TestCreateImageBuildThread(PungiTestCase):
|
|||
@mock.patch("pungi.phases.image_build.KojiWrapper")
|
||||
@mock.patch("pungi.phases.image_build.Linker")
|
||||
def test_process_handle_fail_only_one_optional(self, Linker, KojiWrapper):
|
||||
compose = DummyCompose(
|
||||
self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
|
||||
)
|
||||
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
|
||||
pool = mock.Mock()
|
||||
cmd = {
|
||||
"image_conf": {
|
||||
|
|
|
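The recurring substitution in the hunks above (repeated below in the live images, live media and notifier files) trades master's mock assertion helpers (`assert_called()`, `assert_not_called()`) for plain boolean checks on the 4.3 branch — presumably, though this is an assumption, to support older `mock` releases that predate those helpers. A minimal Python sketch of the two spellings, including the `called_once` pitfall visible in several hunks:

    from unittest import mock

    m = mock.Mock()
    m("x")

    m.assert_called()  # helper; raises AssertionError if m was never called
    assert m.called    # portable boolean attribute; works on old mock too

    # Pitfall: `called_once` is not a real Mock attribute. Attribute access
    # auto-creates a truthy child mock, so this check can never fail; prefer
    # m.call_count == 1 or m.assert_called_once().
    assert m.called_once
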
@ -28,7 +28,6 @@ def fake_listdir(pattern, result=None, exc=None):
"""Create a function that mocks os.listdir. If the path contains pattern,
result will be returned or exc raised. Otherwise it's normal os.listdir
"""

# The point of this is to avoid issues on Python 2, where apparently
# isdir() is using listdir(), so the mocking is breaking it.
def worker(path):

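For orientation, a self-contained sketch of the helper this hunk touches, reconstructed from its docstring; the body below is an illustrative assumption, not the verbatim source:

    import os

    def fake_listdir(pattern, result=None, exc=None):
        # If the path contains `pattern`, return `result` or raise `exc`;
        # otherwise delegate to the real os.listdir.
        real_listdir = os.listdir  # bind early so patching os.listdir stays safe

        def worker(path):
            if pattern in path:
                if exc is not None:
                    raise exc
                return result
            return real_listdir(path)

        return worker
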
@ -121,6 +121,7 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
)

def test_get_image_paths(self):

# The data for this tests is obtained from the actual Koji build. It
# includes lots of fields that are not used, but for the sake of
# completeness is fully preserved.

@ -320,6 +321,7 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
)

def test_get_image_paths_failed_subtask(self):

failed = set()

def failed_callback(arch):

@ -43,7 +43,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -124,7 +124,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -192,7 +192,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -265,7 +265,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -363,7 +363,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -433,7 +433,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -503,7 +503,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -571,7 +571,7 @@ class TestLiveImagesPhase(PungiTestCase):
phase.run()

# assert at least one thread was started
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.maxDiff = None
six.assertCountEqual(
self,

@ -958,9 +958,7 @@ class TestCreateLiveImageThread(PungiTestCase):
@mock.patch("pungi.phases.live_images.run")
@mock.patch("pungi.phases.live_images.KojiWrapper")
def test_process_handles_fail(self, KojiWrapper, run, copy2):
compose = DummyCompose(
self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
)
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
pool = mock.Mock()
cmd = {
"ks_file": "/path/to/ks_file",

@ -1013,9 +1011,7 @@ class TestCreateLiveImageThread(PungiTestCase):
@mock.patch("pungi.phases.live_images.run")
@mock.patch("pungi.phases.live_images.KojiWrapper")
def test_process_handles_exception(self, KojiWrapper, run, copy2):
compose = DummyCompose(
self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
)
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
pool = mock.Mock()
cmd = {
"ks_file": "/path/to/ks_file",

@ -28,7 +28,6 @@ class TestLiveMediaPhase(PungiTestCase):
]
},
"koji_profile": "koji",
"koji_cache": "/tmp",
},
)

@ -37,7 +36,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase = LiveMediaPhase(compose)

phase.run()
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.assertEqual(
phase.pool.queue_put.call_args_list,
[

@ -86,7 +85,6 @@ class TestLiveMediaPhase(PungiTestCase):
]
},
"koji_profile": "koji",
"koji_cache": "/tmp",
},
)

@ -95,7 +93,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase = LiveMediaPhase(compose)

phase.run()
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.assertEqual(
phase.pool.queue_put.call_args_list,
[

@ -150,7 +148,6 @@ class TestLiveMediaPhase(PungiTestCase):
]
},
"koji_profile": "koji",
"koji_cache": "/tmp",
},
)

@ -159,7 +156,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase = LiveMediaPhase(compose)

phase.run()
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.assertEqual(
phase.pool.queue_put.call_args_list,
[

@ -262,7 +259,6 @@ class TestLiveMediaPhase(PungiTestCase):
]
},
"koji_profile": "koji",
"koji_cache": "/tmp",
},
)

@ -271,7 +267,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase = LiveMediaPhase(compose)

phase.run()
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)
self.assertEqual(
phase.pool.queue_put.call_args_list,
[

@ -368,7 +364,6 @@ class TestLiveMediaPhase(PungiTestCase):
]
},
"koji_profile": "koji",
"koji_cache": "/tmp",
},
)

@ -399,7 +394,6 @@ class TestLiveMediaPhase(PungiTestCase):
]
},
"koji_profile": "koji",
"koji_cache": "/tmp",
},
)

@ -450,7 +444,7 @@ class TestLiveMediaPhase(PungiTestCase):
phase = LiveMediaPhase(compose)

phase.run()
phase.pool.add.assert_called()
self.assertTrue(phase.pool.add.called)

self.assertEqual(
phase.pool.queue_put.call_args_list,

@ -617,9 +611,7 @@ class TestLiveMediaThread(PungiTestCase):
@mock.patch("pungi.phases.livemedia_phase.get_file_size")
@mock.patch("pungi.phases.livemedia_phase.KojiWrapper")
def test_handle_koji_fail(self, KojiWrapper, get_file_size, get_mtime):
compose = DummyCompose(
self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
)
compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
config = {
"arches": ["amd64", "x86_64"],
"ksfile": "file.ks",

@ -696,7 +688,6 @@ class TestLiveMediaThread(PungiTestCase):
self.topdir,
{
"koji_profile": "koji",
"koji_cache": "/tmp",
"failable_deliverables": [("^.+$", {"*": ["live-media"]})],
},
)

@ -766,7 +757,6 @@ class TestLiveMediaThread(PungiTestCase):
self.topdir,
{
"koji_profile": "koji",
"koji_cache": "/tmp",
"failable_deliverables": [("^.+$", {"*": ["live-media"]})],
},
)

@ -133,7 +133,7 @@ class TestNotifier(unittest.TestCase):
def test_does_not_run_without_config(self, run, makedirs):
n = PungiNotifier(None)
n.send("cmd", foo="bar", baz="quux")
run.assert_not_called()
self.assertFalse(run.called)

@mock.patch("pungi.util.translate_path")
@mock.patch("kobo.shortcuts.run")

@ -146,4 +146,4 @@ class TestNotifier(unittest.TestCase):
n.send("cmd", **self.data)

self.assertEqual(run.call_args_list, [self._call("run-notify", "cmd")])
self.compose.log_warning.assert_called()
self.assertTrue(self.compose.log_warning.called)

@ -0,0 +1,934 @@
# -*- coding: utf-8 -*-

import itertools
import json
from functools import wraps
import operator
import os
import shutil
import subprocess
from textwrap import dedent

from unittest import mock
import six
from six.moves import configparser

from parameterized import parameterized

from tests.helpers import BaseTestCase, PungiTestCase, touch, FIXTURE_DIR
from pungi_utils import orchestrator as o


class TestConfigSubstitute(PungiTestCase):
def setUp(self):
super(TestConfigSubstitute, self).setUp()
self.fp = os.path.join(self.topdir, "config.conf")

@parameterized.expand(
[
("hello = 'world'", "hello = 'world'"),
("hello = '{{foo}}'", "hello = 'bar'"),
("hello = '{{ foo}}'", "hello = 'bar'"),
("hello = '{{foo }}'", "hello = 'bar'"),
]
)
def test_substitutions(self, initial, expected):
touch(self.fp, initial)
o.fill_in_config_file(self.fp, {"foo": "bar"})
with open(self.fp) as f:
self.assertEqual(expected, f.read())

def test_missing_key(self):
touch(self.fp, "hello = '{{unknown}}'")
with self.assertRaises(RuntimeError) as ctx:
o.fill_in_config_file(self.fp, {})
self.assertEqual(
"Unknown placeholder 'unknown' in config.conf", str(ctx.exception)
)

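The parameterized cases above pin down the placeholder grammar ({{key}}, tolerating spaces inside the braces) and the exact error message for unknown keys. An implementation consistent with these tests could be as small as the following sketch (an assumption, not the shipped code):

    import os
    import re

    def fill_in_config_file(path, substitutions):
        def repl(match):
            key = match.group(1)
            try:
                return substitutions[key]
            except KeyError:
                raise RuntimeError(
                    "Unknown placeholder '%s' in %s"
                    % (key, os.path.basename(path))
                )

        with open(path) as f:
            contents = re.sub(r"{{\s*(\w+)\s*}}", repl, f.read())
        with open(path, "w") as f:
            f.write(contents)
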
class TestSafeGetList(BaseTestCase):
@parameterized.expand(
[
("", []),
("foo", ["foo"]),
("foo,bar", ["foo", "bar"]),
("foo bar", ["foo", "bar"]),
]
)
def test_success(self, value, expected):
cf = configparser.RawConfigParser()
cf.add_section("general")
cf.set("general", "key", value)
self.assertEqual(o._safe_get_list(cf, "general", "key"), expected)

def test_default(self):
cf = configparser.RawConfigParser()
cf.add_section("general")
self.assertEqual(o._safe_get_list(cf, "general", "missing", "hello"), "hello")


class TestComposePart(PungiTestCase):
def test_from_minimal_config(self):
cf = configparser.RawConfigParser()
cf.add_section("test")
cf.set("test", "config", "my.conf")

part = o.ComposePart.from_config(cf, "test", "/tmp/config")
deps = "set()" if six.PY3 else "set([])"
self.assertEqual(str(part), "test")
self.assertEqual(
repr(part),
"ComposePart('test', '/tmp/config/my.conf', 'READY', "
"just_phase=[], skip_phase=[], dependencies=%s)" % deps,
)
self.assertFalse(part.failable)

def test_from_full_config(self):
cf = configparser.RawConfigParser()
cf.add_section("test")
cf.set("test", "config", "my.conf")
cf.set("test", "depends_on", "base")
cf.set("test", "skip_phase", "skip")
cf.set("test", "just_phase", "just")
cf.set("test", "failable", "yes")

part = o.ComposePart.from_config(cf, "test", "/tmp/config")
deps = "{'base'}" if six.PY3 else "set(['base'])"
self.assertEqual(
repr(part),
"ComposePart('test', '/tmp/config/my.conf', 'WAITING', "
"just_phase=['just'], skip_phase=['skip'], dependencies=%s)" % deps,
)
self.assertTrue(part.failable)

def test_get_cmd(self):
conf = o.Config(
"/tgt/", "production", "RC-1.0", "/old", "/cfg", 1234, ["--quiet"]
)
part = o.ComposePart(
"test", "/tmp/my.conf", just_phase=["just"], skip_phase=["skip"]
)
part.path = "/compose"

self.assertEqual(
part.get_cmd(conf),
[
"pungi-koji",
"--config",
"/tmp/my.conf",
"--compose-dir",
"/compose",
"--production",
"--label",
"RC-1.0",
"--just-phase",
"just",
"--skip-phase",
"skip",
"--old-compose",
"/old/parts",
"--koji-event",
"1234",
"--quiet",
"--no-latest-link",
],
)

def test_refresh_status(self):
part = o.ComposePart("test", "/tmp/my.conf")
part.path = os.path.join(self.topdir)
touch(os.path.join(self.topdir, "STATUS"), "FINISHED")
part.refresh_status()
self.assertEqual(part.status, "FINISHED")

def test_refresh_status_missing_file(self):
part = o.ComposePart("test", "/tmp/my.conf")
part.path = os.path.join(self.topdir)
part.refresh_status()
self.assertEqual(part.status, "DOOMED")

@parameterized.expand(["FINISHED", "FINISHED_INCOMPLETE"])
def test_is_finished(self, status):
part = o.ComposePart("test", "/tmp/my.conf")
part.status = status
self.assertTrue(part.is_finished())

@parameterized.expand(["STARTED", "WAITING"])
def test_is_not_finished(self, status):
part = o.ComposePart("test", "/tmp/my.conf")
part.status = status
self.assertFalse(part.is_finished())

@mock.patch("pungi_utils.orchestrator.fill_in_config_file")
@mock.patch("pungi_utils.orchestrator.get_compose_dir")
@mock.patch("kobo.conf.PyConfigParser")
def test_setup_start(self, Conf, gcd, ficf):
def pth(*path):
return os.path.join(self.topdir, *path)

conf = o.Config(
pth("tgt"), "production", "RC-1.0", "/old", pth("cfg"), None, None
)
part = o.ComposePart("test", "/tmp/my.conf")
parts = {"base": mock.Mock(path="/base", is_finished=lambda: True)}
Conf.return_value.opened_files = ["foo.conf"]

part.setup_start(conf, parts)

self.assertEqual(part.status, "STARTED")
self.assertEqual(part.path, gcd.return_value)
self.assertEqual(part.log_file, pth("tgt", "logs", "test.log"))
self.assertEqual(
ficf.call_args_list,
[mock.call("foo.conf", {"part-base": "/base", "configdir": pth("cfg")})],
)
self.assertEqual(
gcd.call_args_list,
[
mock.call(
pth("tgt/parts"),
Conf.return_value,
compose_type="production",
compose_label="RC-1.0",
)
],
)

@parameterized.expand(
[
# Nothing blocking, no change
([], [], o.Status.READY),
# Remove last blocker and switch to READY
(["finished"], [], o.Status.READY),
# Blocker remaining, stay in WAITING
(["finished", "block"], ["block"], o.Status.WAITING),
]
)
def test_unblock_on(self, deps, blockers, status):
part = o.ComposePart("test", "/tmp/my.conf", dependencies=deps)
part.unblock_on("finished")
six.assertCountEqual(self, part.blocked_on, blockers)
self.assertEqual(part.status, status)


class TestStartPart(PungiTestCase):
@mock.patch("subprocess.Popen")
def test_start(self, Popen):
part = mock.Mock(log_file=os.path.join(self.topdir, "log"))
config = mock.Mock()
parts = mock.Mock()
cmd = ["pungi-koji", "..."]

part.get_cmd.return_value = cmd

proc = o.start_part(config, parts, part)

self.assertEqual(
part.mock_calls,
[mock.call.setup_start(config, parts), mock.call.get_cmd(config)],
)
self.assertEqual(proc, Popen.return_value)
self.assertEqual(
Popen.call_args_list,
[mock.call(cmd, stdout=mock.ANY, stderr=subprocess.STDOUT)],
)


class TestHandleFinished(BaseTestCase):
def setUp(self):
self.config = mock.Mock()
self.linker = mock.Mock()
self.parts = {"a": mock.Mock(), "b": mock.Mock()}

@mock.patch("pungi_utils.orchestrator.update_metadata")
@mock.patch("pungi_utils.orchestrator.copy_part")
def test_handle_success(self, cp, um):
proc = mock.Mock(returncode=0)
o.handle_finished(self.config, self.linker, self.parts, proc, self.parts["a"])

self.assertEqual(
self.parts["a"].mock_calls,
[mock.call.refresh_status(), mock.call.unblock_on(self.parts["a"].name)],
)
self.assertEqual(
self.parts["b"].mock_calls, [mock.call.unblock_on(self.parts["a"].name)]
)
self.assertEqual(
cp.call_args_list, [mock.call(self.config, self.linker, self.parts["a"])]
)
self.assertEqual(um.call_args_list, [mock.call(self.config, self.parts["a"])])

@mock.patch("pungi_utils.orchestrator.block_on")
def test_handle_failure(self, bo):
proc = mock.Mock(returncode=1)
o.handle_finished(self.config, self.linker, self.parts, proc, self.parts["a"])

self.assertEqual(self.parts["a"].mock_calls, [mock.call.refresh_status()])

self.assertEqual(
bo.call_args_list, [mock.call(self.parts, self.parts["a"].name)]
)


class TestBlockOn(BaseTestCase):
def test_single(self):
parts = {"b": o.ComposePart("b", "b.conf", dependencies=["a"])}

o.block_on(parts, "a")

self.assertEqual(parts["b"].status, o.Status.BLOCKED)

def test_chain(self):
parts = {
"b": o.ComposePart("b", "b.conf", dependencies=["a"]),
"c": o.ComposePart("c", "c.conf", dependencies=["b"]),
"d": o.ComposePart("d", "d.conf", dependencies=["c"]),
}

o.block_on(parts, "a")

self.assertEqual(parts["b"].status, o.Status.BLOCKED)
self.assertEqual(parts["c"].status, o.Status.BLOCKED)
self.assertEqual(parts["d"].status, o.Status.BLOCKED)


class TestUpdateMetadata(PungiTestCase):
def assertEqualJSON(self, f1, f2):
with open(f1) as f:
actual = json.load(f)
with open(f2) as f:
expected = json.load(f)
self.assertEqual(actual, expected)

def assertEqualMetadata(self, expected):
expected_dir = os.path.join(FIXTURE_DIR, expected, "compose/metadata")
for f in os.listdir(expected_dir):
self.assertEqualJSON(
os.path.join(self.tgt, "compose/metadata", f),
os.path.join(expected_dir, f),
)

@parameterized.expand(["empty-metadata", "basic-metadata"])
def test_merge_into_empty(self, fixture):
self.tgt = os.path.join(self.topdir, "target")

conf = o.Config(self.tgt, "production", None, None, None, None, [])
part = o.ComposePart("test", "/tmp/my.conf")
part.path = os.path.join(FIXTURE_DIR, "DP-1.0-20181001.n.0")

shutil.copytree(os.path.join(FIXTURE_DIR, fixture), self.tgt)

o.update_metadata(conf, part)

self.assertEqualMetadata(fixture + "-merged")


class TestCopyPart(PungiTestCase):
@mock.patch("pungi_utils.orchestrator.hardlink_dir")
def test_copy(self, hd):
self.tgt = os.path.join(self.topdir, "target")
conf = o.Config(self.tgt, "production", None, None, None, None, [])
linker = mock.Mock()
part = o.ComposePart("test", "/tmp/my.conf")
part.path = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")

o.copy_part(conf, linker, part)

six.assertCountEqual(
self,
hd.call_args_list,
[
mock.call(
linker,
os.path.join(part.path, "compose", variant),
os.path.join(self.tgt, "compose", variant),
)
for variant in ["Client", "Server"]
],
)


class TestHardlinkDir(PungiTestCase):
def test_hardlinking(self):
linker = mock.Mock()
src = os.path.join(self.topdir, "src")
dst = os.path.join(self.topdir, "dst")
files = ["file.txt", "nested/deep/another.txt"]

for f in files:
touch(os.path.join(src, f))

o.hardlink_dir(linker, src, dst)

six.assertCountEqual(
self,
linker.queue_put.call_args_list,
[mock.call((os.path.join(src, f), os.path.join(dst, f))) for f in files],
)


class TestCheckFinishedProcesses(BaseTestCase):
def test_nothing_finished(self):
k1 = mock.Mock(returncode=None)
v1 = mock.Mock()
processes = {k1: v1}

six.assertCountEqual(self, o.check_finished_processes(processes), [])

def test_yields_finished(self):
k1 = mock.Mock(returncode=None)
v1 = mock.Mock()
k2 = mock.Mock(returncode=0)
v2 = mock.Mock()
processes = {k1: v1, k2: v2}

six.assertCountEqual(self, o.check_finished_processes(processes), [(k2, v2)])

def test_yields_failed(self):
k1 = mock.Mock(returncode=1)
v1 = mock.Mock()
processes = {k1: v1}

six.assertCountEqual(self, o.check_finished_processes(processes), [(k1, v1)])


class _Part(object):
def __init__(self, name, parent=None, fails=False, status=None):
self.name = name
self.finished = False
self.status = o.Status.WAITING if parent else o.Status.READY
if status:
self.status = status
self.proc = mock.Mock(name="proc_%s" % name, pid=hash(self))
self.parent = parent
self.fails = fails
self.failable = False
self.path = "/path/to/%s" % name
self.blocked_on = set([parent]) if parent else set()

def is_finished(self):
return self.finished or self.status == "FINISHED"

def __repr__(self):
return "<_Part(%r, parent=%r)>" % (self.name, self.parent)


def with_mocks(parts, finish_order, wait_results):
"""Setup all mocks and create dict with the parts.
:param finish_order: nested list: first element contains parts that finish
in first iteration, etc.
:param wait_results: list of names of processes that are returned by wait in each
iteration
"""

def decorator(func):
@wraps(func)
def worker(self, lp, update_status, cfp, hf, sp, wait):
self.parts = dict((p.name, p) for p in parts)
self.linker = lp.return_value.__enter__.return_value

update_status.side_effect = self.mock_update
hf.side_effect = self.mock_finish
sp.side_effect = self.mock_start

finish = [[]]
for grp in finish_order:
finish.append([(self.parts[p].proc, self.parts[p]) for p in grp])

cfp.side_effect = finish
wait.side_effect = [(self.parts[p].proc.pid, 0) for p in wait_results]

func(self)

self.assertEqual(lp.call_args_list, [mock.call("hardlink")])

return worker

return decorator


@mock.patch("os.wait")
@mock.patch("pungi_utils.orchestrator.start_part")
@mock.patch("pungi_utils.orchestrator.handle_finished")
@mock.patch("pungi_utils.orchestrator.check_finished_processes")
@mock.patch("pungi_utils.orchestrator.update_status")
@mock.patch("pungi_utils.orchestrator.linker_pool")
class TestRunAll(BaseTestCase):
def setUp(self):
self.maxDiff = None
self.conf = mock.Mock(name="global_config")
self.calls = []

def mock_update(self, global_config, parts):
self.assertEqual(global_config, self.conf)
self.assertEqual(parts, self.parts)
self.calls.append("update_status")

def mock_start(self, global_config, parts, part):
self.assertEqual(global_config, self.conf)
self.assertEqual(parts, self.parts)
self.calls.append(("start_part", part.name))
part.status = o.Status.STARTED
return part.proc

@property
def sorted_calls(self):
"""Sort the consecutive calls of the same function based on the argument."""

def key(val):
return val[0] if isinstance(val, tuple) else val

return list(
itertools.chain.from_iterable(
sorted(grp, key=operator.itemgetter(1))
for _, grp in itertools.groupby(self.calls, key)
)
)

def mock_finish(self, global_config, linker, parts, proc, part):
self.assertEqual(global_config, self.conf)
self.assertEqual(linker, self.linker)
self.assertEqual(parts, self.parts)
self.calls.append(("handle_finished", part.name))
for child in parts.values():
if child.parent == part.name:
child.status = o.Status.BLOCKED if part.fails else o.Status.READY
part.status = "DOOMED" if part.fails else "FINISHED"

@with_mocks(
[_Part("fst"), _Part("snd", parent="fst")], [["fst"], ["snd"]], ["fst", "snd"]
)
def test_sequential(self):
o.run_all(self.conf, self.parts)

self.assertEqual(
self.sorted_calls,
[
# First iteration starts fst
"update_status",
("start_part", "fst"),
# Second iteration handles finish of fst and starts snd
"update_status",
("handle_finished", "fst"),
("start_part", "snd"),
# Third iteration handles finish of snd
"update_status",
("handle_finished", "snd"),
# Final update of status
"update_status",
],
)

@with_mocks([_Part("fst"), _Part("snd")], [["fst", "snd"]], ["fst"])
def test_parallel(self):
o.run_all(self.conf, self.parts)

self.assertEqual(
self.sorted_calls,
[
# First iteration starts both fst and snd
"update_status",
("start_part", "fst"),
("start_part", "snd"),
# Second iteration handles finish of both of them
"update_status",
("handle_finished", "fst"),
("handle_finished", "snd"),
# Final update of status
"update_status",
],
)

@with_mocks(
[_Part("1"), _Part("2", parent="1"), _Part("3", parent="1")],
[["1"], ["2", "3"]],
["1", "2"],
)
def test_waits_for_dep_then_parallel_with_simultaneous_end(self):
o.run_all(self.conf, self.parts)

self.assertEqual(
self.sorted_calls,
[
# First iteration starts first part
"update_status",
("start_part", "1"),
# Second iteration starts 2 and 3
"update_status",
("handle_finished", "1"),
("start_part", "2"),
("start_part", "3"),
# Both 2 and 3 end in third iteration
"update_status",
("handle_finished", "2"),
("handle_finished", "3"),
# Final update of status
"update_status",
],
)

@with_mocks(
[_Part("1"), _Part("2", parent="1"), _Part("3", parent="1")],
[["1"], ["3"], ["2"]],
["1", "3", "2"],
)
def test_waits_for_dep_then_parallel_with_different_end_times(self):
o.run_all(self.conf, self.parts)

self.assertEqual(
self.sorted_calls,
[
# First iteration starts first part
"update_status",
("start_part", "1"),
# Second iteration starts 2 and 3
"update_status",
("handle_finished", "1"),
("start_part", "2"),
("start_part", "3"),
# Third iteration sees 3 finish
"update_status",
("handle_finished", "3"),
# Fourth iteration, 2 finishes
"update_status",
("handle_finished", "2"),
# Final update of status
"update_status",
],
)

@with_mocks(
[_Part("fst", fails=True), _Part("snd", parent="fst")], [["fst"]], ["fst"]
)
def test_blocked(self):
o.run_all(self.conf, self.parts)

self.assertEqual(
self.sorted_calls,
[
# First iteration starts first part
"update_status",
("start_part", "fst"),
# Second iteration handles fail of first part
"update_status",
("handle_finished", "fst"),
# Final update of status
"update_status",
],
)

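The mock choreography asserted above (update_status at the top of every iteration and once at the end, check_finished_processes reaping children, handle_finished per reaped part, start_part for everything READY, os.wait blocking between iterations) implies a scheduler loop of roughly this shape. This is a sketch inferred from the tests, not the real pungi_utils.orchestrator code:

    import os

    def run_all(global_config, parts):
        with linker_pool("hardlink") as linker:  # helper patched in TestRunAll
            processes = {}
            while True:
                update_status(global_config, parts)
                for proc, part in list(check_finished_processes(processes)):
                    del processes[proc]
                    handle_finished(global_config, linker, parts, proc, part)
                for part in parts.values():
                    if part.status == Status.READY:
                        processes[start_part(global_config, parts, part)] = part
                if not processes:
                    break
                os.wait()  # block until some child compose process exits
            update_status(global_config, parts)

Replaying test_sequential against this loop reproduces the expected call order exactly.
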
@mock.patch("pungi_utils.orchestrator.get_compose_dir")
|
||||
class TestGetTargetDir(BaseTestCase):
|
||||
def test_with_absolute_path(self, gcd):
|
||||
config = {"target": "/tgt", "compose_type": "nightly"}
|
||||
cfg = mock.Mock()
|
||||
cfg.get.side_effect = lambda _, k: config[k]
|
||||
ci = mock.Mock()
|
||||
res = o.get_target_dir(cfg, ci, None, reldir="/checkout")
|
||||
self.assertEqual(res, gcd.return_value)
|
||||
self.assertEqual(
|
||||
gcd.call_args_list,
|
||||
[mock.call("/tgt", ci, compose_type="nightly", compose_label=None)],
|
||||
)
|
||||
|
||||
def test_with_relative_path(self, gcd):
|
||||
config = {"target": "tgt", "compose_type": "nightly"}
|
||||
cfg = mock.Mock()
|
||||
cfg.get.side_effect = lambda _, k: config[k]
|
||||
ci = mock.Mock()
|
||||
res = o.get_target_dir(cfg, ci, None, reldir="/checkout")
|
||||
self.assertEqual(res, gcd.return_value)
|
||||
self.assertEqual(
|
||||
gcd.call_args_list,
|
||||
[
|
||||
mock.call(
|
||||
"/checkout/tgt", ci, compose_type="nightly", compose_label=None
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class TestComputeStatus(BaseTestCase):
|
||||
@parameterized.expand(
|
||||
[
|
||||
([("FINISHED", False)], "FINISHED"),
|
||||
([("FINISHED", False), ("STARTED", False)], "STARTED"),
|
||||
([("FINISHED", False), ("STARTED", False), ("WAITING", False)], "STARTED"),
|
||||
([("FINISHED", False), ("DOOMED", False)], "DOOMED"),
|
||||
(
|
||||
[("FINISHED", False), ("BLOCKED", True), ("DOOMED", True)],
|
||||
"FINISHED_INCOMPLETE",
|
||||
),
|
||||
([("FINISHED", False), ("BLOCKED", False), ("DOOMED", True)], "DOOMED"),
|
||||
([("FINISHED", False), ("DOOMED", True)], "FINISHED_INCOMPLETE"),
|
||||
([("FINISHED", False), ("STARTED", False), ("DOOMED", False)], "STARTED"),
|
||||
]
|
||||
)
|
||||
def test_cases(self, statuses, expected):
|
||||
self.assertEqual(o.compute_status(statuses), expected)
|
||||
|
||||
|
||||
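compute_status receives one (status, failable) pair per part, and the table above fully determines a priority order. A sketch that satisfies every listed case:

    def compute_status(statuses):
        # Anything still running wins.
        if any(st in ("STARTED", "WAITING") for st, _ in statuses):
            return "STARTED"
        # A non-failable failure dooms the whole compose.
        if any(
            st in ("DOOMED", "BLOCKED") and not failable
            for st, failable in statuses
        ):
            return "DOOMED"
        # Failures confined to failable parts only downgrade the result.
        if any(st in ("DOOMED", "BLOCKED") for st, _ in statuses):
            return "FINISHED_INCOMPLETE"
        return "FINISHED"
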
class TestUpdateStatus(PungiTestCase):
def test_updating(self):
os.makedirs(os.path.join(self.topdir, "compose/metadata"))
conf = o.Config(
self.topdir, "production", "RC-1.0", "/old", "/cfg", 1234, ["--quiet"]
)
o.update_status(
conf,
{"1": _Part("1", status="FINISHED"), "2": _Part("2", status="STARTED")},
)
self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")
self.assertFileContent(
os.path.join(self.topdir, "compose/metadata/parts.json"),
dedent(
"""\
{
"1": {
"path": "/path/to/1",
"status": "FINISHED"
},
"2": {
"path": "/path/to/2",
"status": "STARTED"
}
}
"""
),
)


@mock.patch("pungi_utils.orchestrator.get_target_dir")
class TestPrepareComposeDir(PungiTestCase):
def setUp(self):
super(TestPrepareComposeDir, self).setUp()
self.conf = mock.Mock(name="config")
self.main_config = "/some/config"
self.compose_info = mock.Mock(name="compose_info")

def test_new_compose(self, gtd):
def mock_get_target(conf, compose_info, label, reldir):
self.assertEqual(conf, self.conf)
self.assertEqual(compose_info, self.compose_info)
self.assertEqual(label, args.label)
self.assertEqual(reldir, "/some")
touch(os.path.join(self.topdir, "work/global/composeinfo-base.json"), "WOO")
return self.topdir

gtd.side_effect = mock_get_target
args = mock.Mock(name="args", spec=["label"])
retval = o.prepare_compose_dir(
self.conf, args, self.main_config, self.compose_info
)
self.assertEqual(retval, self.topdir)
self.assertFileContent(
os.path.join(self.topdir, "compose/metadata/composeinfo.json"), "WOO"
)
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "logs")))
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "parts")))
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "work/global")))
self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")

def test_restarting_compose(self, gtd):
args = mock.Mock(name="args", spec=["label", "compose_path"])
retval = o.prepare_compose_dir(
self.conf, args, self.main_config, self.compose_info
)
self.assertEqual(gtd.call_args_list, [])
self.assertEqual(retval, args.compose_path)


class TestLoadPartsMetadata(PungiTestCase):
def test_loading(self):
touch(
os.path.join(self.topdir, "compose/metadata/parts.json"), '{"foo": "bar"}'
)
conf = mock.Mock(target=self.topdir)

self.assertEqual(o.load_parts_metadata(conf), {"foo": "bar"})


@mock.patch("pungi_utils.orchestrator.load_parts_metadata")
class TestSetupForRestart(BaseTestCase):
def setUp(self):
self.conf = mock.Mock(name="global_config")

def test_restart_ok(self, lpm):
lpm.return_value = {
"p1": {"status": "FINISHED", "path": "/p1"},
"p2": {"status": "DOOMED", "path": "/p2"},
}
parts = {"p1": _Part("p1"), "p2": _Part("p2", parent="p1")}

o.setup_for_restart(self.conf, parts, ["p2"])

self.assertEqual(parts["p1"].status, "FINISHED")
self.assertEqual(parts["p1"].path, "/p1")
self.assertEqual(parts["p2"].status, "READY")
self.assertEqual(parts["p2"].path, None)

def test_restart_one_blocked_one_ok(self, lpm):
lpm.return_value = {
"p1": {"status": "DOOMED", "path": "/p1"},
"p2": {"status": "DOOMED", "path": "/p2"},
"p3": {"status": "WAITING", "path": None},
}
parts = {
"p1": _Part("p1"),
"p2": _Part("p2", parent="p1"),
"p3": _Part("p3", parent="p2"),
}

o.setup_for_restart(self.conf, parts, ["p1", "p3"])

self.assertEqual(parts["p1"].status, "READY")
self.assertEqual(parts["p1"].path, None)
self.assertEqual(parts["p2"].status, "DOOMED")
self.assertEqual(parts["p2"].path, "/p2")
self.assertEqual(parts["p3"].status, "WAITING")
self.assertEqual(parts["p3"].path, None)

def test_restart_all_blocked(self, lpm):
lpm.return_value = {
"p1": {"status": "DOOMED", "path": "/p1"},
"p2": {"status": "STARTED", "path": "/p2"},
}
parts = {"p1": _Part("p1"), "p2": _Part("p2", parent="p1")}

with self.assertRaises(RuntimeError):
o.setup_for_restart(self.conf, parts, ["p2"])

self.assertEqual(parts["p1"].status, "DOOMED")
self.assertEqual(parts["p1"].path, "/p1")
self.assertEqual(parts["p2"].status, "WAITING")
self.assertEqual(parts["p2"].path, None)


@mock.patch("atexit.register")
@mock.patch("kobo.shortcuts.run")
class TestRunKinit(BaseTestCase):
def test_without_config(self, run, register):
conf = mock.Mock()
conf.getboolean.return_value = False

o.run_kinit(conf)

self.assertEqual(run.call_args_list, [])
self.assertEqual(register.call_args_list, [])

@mock.patch.dict("os.environ")
def test_with_config(self, run, register):
conf = mock.Mock()
conf.getboolean.return_value = True
conf.get.side_effect = lambda section, option: option

o.run_kinit(conf)

self.assertEqual(
run.call_args_list,
[mock.call(["kinit", "-k", "-t", "kerberos_keytab", "kerberos_principal"])],
)
self.assertEqual(
register.call_args_list, [mock.call(os.remove, os.environ["KRB5CCNAME"])]
)


@mock.patch.dict("os.environ", {}, clear=True)
class TestGetScriptEnv(BaseTestCase):
def test_without_metadata(self):
env = o.get_script_env("/foobar")
self.assertEqual(env, {"COMPOSE_PATH": "/foobar"})

def test_with_metadata(self):
compose_dir = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
env = o.get_script_env(compose_dir)
self.maxDiff = None
self.assertEqual(
env,
{
"COMPOSE_PATH": compose_dir,
"COMPOSE_ID": "DP-1.0-20161013.t.4",
"COMPOSE_DATE": "20161013",
"COMPOSE_TYPE": "test",
"COMPOSE_RESPIN": "4",
"COMPOSE_LABEL": "",
"RELEASE_ID": "DP-1.0",
"RELEASE_NAME": "Dummy Product",
"RELEASE_SHORT": "DP",
"RELEASE_VERSION": "1.0",
"RELEASE_TYPE": "ga",
"RELEASE_IS_LAYERED": "",
},
)


class TestRunScripts(BaseTestCase):
@mock.patch("pungi_utils.orchestrator.get_script_env")
@mock.patch("kobo.shortcuts.run")
def test_run_scripts(self, run, get_env):
commands = """
date
env
"""

o.run_scripts("pref_", "/tmp/compose", commands)

self.assertEqual(
run.call_args_list,
[
mock.call(
"date",
logfile="/tmp/compose/logs/pref_0.log",
env=get_env.return_value,
),
mock.call(
"env",
logfile="/tmp/compose/logs/pref_1.log",
env=get_env.return_value,
),
],
)


@mock.patch("pungi.notifier.PungiNotifier")
class TestSendNotification(BaseTestCase):
def test_no_command(self, notif):
o.send_notification("/foobar", None, None)
self.assertEqual(notif.mock_calls, [])

@mock.patch("pungi.util.load_config")
def test_with_command_and_translate(self, load_config, notif):
compose_dir = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
load_config.return_value = {
"translate_paths": [(os.path.dirname(compose_dir), "http://example.com")],
}
parts = {"foo": mock.Mock()}

o.send_notification(compose_dir, "handler", parts)

self.assertEqual(len(notif.mock_calls), 2)
self.assertEqual(notif.mock_calls[0], mock.call(["handler"]))
_, args, kwargs = notif.mock_calls[1]
self.assertEqual(args, ("status-change",))
self.assertEqual(
kwargs,
{
"status": "FINISHED",
"workdir": compose_dir,
"location": "http://example.com/DP-1.0-20161013.t.4",
"compose_id": "DP-1.0-20161013.t.4",
"compose_date": "20161013",
"compose_type": "test",
"compose_respin": "4",
"compose_label": None,
"release_id": "DP-1.0",
"release_name": "Dummy Product",
"release_short": "DP",
"release_version": "1.0",
"release_type": "ga",
"release_is_layered": False,
},
)
self.assertEqual(load_config.call_args_list, [mock.call(parts["foo"].config)])

@ -171,7 +171,6 @@ class OSBSThreadTest(helpers.PungiTestCase):
self.topdir,
{
"koji_profile": "koji",
"koji_cache": "/tmp",
"translate_paths": [(self.topdir, "http://root")],
},
)

@ -239,7 +239,6 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
self.topdir,
{
"koji_profile": "koji",
"koji_cache": "/tmp",
"translate_paths": [(self.topdir, "http://root")],
},
)

@ -103,7 +103,6 @@ class OstreeThreadTest(helpers.PungiTestCase):
"release_name": "Fedora",
"release_version": "Rawhide",
"koji_profile": "koji",
"koji_cache": "/tmp",
"runroot_tag": "rrt",
"image_volid_formats": ["{release_short}-{variant}-{arch}"],
"translate_paths": [(self.topdir + "/work", "http://example.com/work")],

@ -123,7 +123,6 @@ class OSTreeThreadTest(helpers.PungiTestCase):
self.topdir,
{
"koji_profile": "koji",
"koji_cache": "/tmp",
"runroot_tag": "rrt",
"translate_paths": [(self.topdir, "http://example.com")],
},

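All four hunks in this group make the same one-line change: the 4.3 branch drops the `"koji_cache": "/tmp"` key that master's test configs set next to `koji_profile`. A reasonable reading — stated here as an assumption — is that the option arrived after 4.3 as a local cache directory for content downloaded from Koji. A hypothetical fragment in pungi's Python-style config format:

    koji_profile = "koji"
    koji_cache = "/var/cache/pungi/koji"  # assumed: local cache for Koji downloads
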
@ -315,6 +315,7 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):

@mock.patch("kobo.shortcuts.run")
def test_extra_config_with_keep_original_sources(self, run):

configdir = os.path.join(self.topdir, "config")
self._make_dummy_config_dir(configdir)
treefile = os.path.join(configdir, "fedora-atomic-docker-host.json")

@ -47,7 +47,7 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
pkgset.name = name
pkgset.reuse = None

def mock_subset(primary, arch_list, **kwargs):
def mock_subset(primary, arch_list, exclusive_noarch):
self.subsets[primary] = mock.Mock()
return self.subsets[primary]

@ -73,16 +73,10 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
self.assertEqual(result["amd64"], self.subsets["amd64"])

self.pkgset.subset.assert_any_call(
"x86_64",
["x86_64", "noarch", "src"],
exclusive_noarch=True,
inherit_to_noarch=True,
"x86_64", ["x86_64", "noarch", "src"], exclusive_noarch=True
)
self.pkgset.subset.assert_any_call(
"amd64",
["amd64", "x86_64", "noarch", "src"],
exclusive_noarch=True,
inherit_to_noarch=True,
"amd64", ["amd64", "x86_64", "noarch", "src"], exclusive_noarch=True
)

for arch, pkgset in result.package_sets.items():

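Note the shape of the test double: master's `mock_subset` takes `**kwargs`, so it keeps working as `subset()` grows keywords (here the extra `inherit_to_noarch=True`), while the 4.3 branch pins the exact signature. A tiny illustration of the tolerant form (names are illustrative):

    def tolerant_subset(primary, arch_list, **kwargs):
        # Accepts any keyword the production code may add later.
        return primary, sorted(kwargs)

    print(tolerant_subset("x86_64", ["x86_64", "noarch", "src"],
                          exclusive_noarch=True, inherit_to_noarch=True))
    # -> ('x86_64', ['exclusive_noarch', 'inherit_to_noarch'])
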
@ -133,14 +133,6 @@ class PkgsetCompareMixin(object):
self.assertEqual({}, actual)


class DummySystem(object):
def __init__(self):
self.methods = ["_listapi", "Dummy", "getRPM", "getRPMChecksums"]

def listMethods(self):
return self.methods


@ddt.ddt
@mock.patch("pungi.phases.pkgset.pkgsets.ReaderPool", new=FakePool)
@mock.patch("kobo.pkgset.FileCache", new=MockFileCache)

@ -166,10 +158,9 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
self.tagged_rpms = json.load(f)

self.path_info = MockPathInfo(self.topdir)
self.koji_downloader = helpers.FSKojiDownloader()

self.koji_wrapper = mock.Mock()
self.koji_wrapper.koji_proxy.listTaggedRPMS.return_value = self.tagged_rpms
self.koji_wrapper.koji_proxy.system = DummySystem()
self.koji_wrapper.koji_module.pathinfo = self.path_info

def _touch_files(self, filenames):

@ -200,9 +191,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
]
)

pkgset = package_set(
"pkgset", self.koji_wrapper, [None], downloader=self.koji_downloader
)
pkgset = package_set("pkgset", self.koji_wrapper, [None])

result = pkgset.populate("f25")

@ -240,11 +229,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
)

pkgset = package_set(
"pkgset",
self.koji_wrapper,
[None],
arches=["x86_64"],
downloader=self.koji_downloader,
"pkgset", self.koji_wrapper, [None], arches=["x86_64"]
)

result = pkgset.populate("f25")

@ -274,11 +259,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
)

pkgset = pkgsets.KojiPackageSet(
"pkgset",
self.koji_wrapper,
["cafebabe", "deadbeef"],
arches=["x86_64"],
downloader=self.koji_downloader,
"pkgset", self.koji_wrapper, ["cafebabe", "deadbeef"], arches=["x86_64"]
)

result = pkgset.populate("f25")

@ -307,11 +288,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
)

pkgset = pkgsets.KojiPackageSet(
"pkgset",
self.koji_wrapper,
["cafebabe", None],
arches=["x86_64"],
downloader=self.koji_downloader,
"pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
)

result = pkgset.populate("f25")

@ -333,11 +310,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):

def test_can_not_find_signed_package(self):
pkgset = pkgsets.KojiPackageSet(
"pkgset",
self.koji_wrapper,
["cafebabe"],
arches=["x86_64"],
downloader=self.koji_downloader,
"pkgset", self.koji_wrapper, ["cafebabe"], arches=["x86_64"]
)

with self.assertRaises(RuntimeError) as ctx:

@ -376,7 +349,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
arches=["x86_64"],
signed_packages_retries=2,
signed_packages_wait=5,
downloader=self.koji_downloader,
)

result = pkgset.populate("f25")

@ -414,7 +386,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
["cafebabe"],
arches=["x86_64"],
allow_invalid_sigkeys=True,
downloader=self.koji_downloader,
)

pkgset.populate("f25")

@ -435,11 +406,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):

def test_can_not_find_any_package(self):
pkgset = pkgsets.KojiPackageSet(
"pkgset",
self.koji_wrapper,
["cafebabe", None],
arches=["x86_64"],
downloader=self.koji_downloader,
"pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
)

with self.assertRaises(RuntimeError) as ctx:

@ -464,7 +431,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
arches=["x86_64"],
signed_packages_retries=2,
signed_packages_wait=5,
downloader=self.koji_downloader,
)

with self.assertRaises(RuntimeError) as ctx:

@ -505,7 +471,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
[None],
packages=["bash"],
populate_only_packages=True,
downloader=self.koji_downloader,
)

result = pkgset.populate("f25")

@ -614,7 +579,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
[None],
arches=["x86_64"],
cache_region=cache_region,
downloader=self.koji_downloader,
)

# Try calling the populate twice, but expect just single listTaggedRPMs

@ -658,7 +622,6 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
[None],
arches=["x86_64"],
cache_region=cache_region,
downloader=self.koji_downloader,
)

# Try calling the populate twice with different event id. It must not

@ -716,11 +679,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
]

pkgset = package_set(
"pkgset",
self.koji_wrapper,
[None],
extra_builds=["pungi-4.1.3-3.fc25"],
downloader=self.koji_downloader,
"pkgset", self.koji_wrapper, [None], extra_builds=["pungi-4.1.3-3.fc25"]
)

result = pkgset.populate("f25")

@ -932,8 +891,6 @@ class TestReuseKojiPkgset(helpers.PungiTestCase):
"include_packages": None,
"rpms_by_arch": mock.Mock(),
"srpms_by_name": mock.Mock(),
"inherit_to_noarch": True,
"exclusive_noarch": True,
}
)
self.pkgset.old_file_cache = mock.Mock()

@ -1143,8 +1100,6 @@ class TestReuseKojiMockPkgset(helpers.PungiTestCase):
"include_packages": None,
"rpms_by_arch": mock.Mock(),
"srpms_by_name": mock.Mock(),
"exclusive_noarch": True,
"inherit_to_noarch": True,
}
)
self.pkgset.old_file_cache = mock.Mock()

@ -1226,28 +1181,6 @@ class TestMergePackageSets(PkgsetCompareMixin, unittest.TestCase):
first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "noarch": []}
)

def test_merge_doesnt_exclude_noarch_exclude_arch_when_configured(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])

pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)

pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
pkg.excludearch = ["i686"]
second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)

first.merge(second, "i386", ["i686", "noarch"], inherit_to_noarch=False)

print(first.rpms_by_arch)
self.assertPkgsetEqual(
first.rpms_by_arch,
{
"i686": ["rpms/bash@4.3.42@4.fc24@i686"],
"noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
},
)

def test_merge_excludes_noarch_exclusive_arch(self):
first = pkgsets.PackageSetBase("first", [None])
second = pkgsets.PackageSetBase("second", [None])

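Throughout these tests, fake packages are written as `rpms/<name>@<version>@<release>@<arch>` strings; the mocked kobo FileCache apparently derives package attributes from that path. An illustrative parser for the convention (an assumption about the test helpers, not their verbatim code):

    def parse_fake_rpm(path):
        # "rpms/bash@4.3.42@4.fc24@i686" -> ("bash", "4.3.42", "4.fc24", "i686")
        name, version, release, arch = path.rsplit("/", 1)[-1].split("@")
        return name, version, release, arch

    assert parse_fake_rpm("rpms/pungi@4.1.3@3.fc25@noarch")[3] == "noarch"
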
@@ -79,8 +79,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
     def setUp(self):
         super(TestPopulateGlobalPkgset, self).setUp()
         self.compose = helpers.DummyCompose(
-            self.topdir,
-            {"pkgset_koji_tag": "f25", "sigkeys": ["foo", "bar"], "koji_cache": "/tmp"},
+            self.topdir, {"pkgset_koji_tag": "f25", "sigkeys": ["foo", "bar"]}
         )
         self.koji_wrapper = mock.Mock()
         self.pkgset_path = os.path.join(

@@ -99,7 +98,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         orig_pkgset = KojiPackageSet.return_value
 
         pkgsets = source_koji.populate_global_pkgset(
-            self.compose, self.koji_wrapper, 123456
+            self.compose, self.koji_wrapper, "/prefix", 123456
         )
 
         self.assertEqual(len(pkgsets), 1)

@@ -118,11 +117,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
     def test_populate_with_multiple_koji_tags(self, KojiPackageSet, materialize):
         self.compose = helpers.DummyCompose(
             self.topdir,
-            {
-                "pkgset_koji_tag": ["f25", "f25-extra"],
-                "sigkeys": ["foo", "bar"],
-                "koji_cache": "/tmp",
-            },
+            {"pkgset_koji_tag": ["f25", "f25-extra"], "sigkeys": ["foo", "bar"]},
         )
 
         materialize.side_effect = self.mock_materialize

@@ -130,7 +125,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         KojiPackageSet.return_value.reuse = None
 
         pkgsets = source_koji.populate_global_pkgset(
-            self.compose, self.koji_wrapper, 123456
+            self.compose, self.koji_wrapper, "/prefix", 123456
         )
 
         self.assertEqual(len(pkgsets), 2)

@@ -165,7 +160,6 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
             {
                 "gather_method": "nodeps",
                 "pkgset_koji_tag": "f25",
-                "koji_cache": "/tmp",
                 "sigkeys": ["foo", "bar"],
                 "additional_packages": [(".*", {"*": ["pkg", "foo.x86_64"]})],
             },

@@ -174,7 +168,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         materialize.side_effect = self.mock_materialize
 
         pkgsets = source_koji.populate_global_pkgset(
-            self.compose, self.koji_wrapper, 123456
+            self.compose, self.koji_wrapper, "/prefix", 123456
         )
         self.assertEqual(len(pkgsets), 1)
         six.assertCountEqual(self, pkgsets[0].packages, ["pkg", "foo"])
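The populate_global_pkgset hunks above all encode the same interface change: master reads the package download location from the koji_cache compose option, while the 4.3.7 branch threads the Koji volume prefix through as an explicit positional argument. The two call shapes, using only names that appear in the tests (event_id stands in for the literal 123456):

    # master: no path argument; the cache dir comes from compose.conf["koji_cache"]
    pkgsets = source_koji.populate_global_pkgset(compose, koji_wrapper, event_id)

    # 4.3.7 branch: the Koji topdir prefix is passed explicitly
    pkgsets = source_koji.populate_global_pkgset(compose, koji_wrapper, "/prefix", event_id)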
@@ -183,9 +177,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
 class TestGetPackageSetFromKoji(helpers.PungiTestCase):
     def setUp(self):
         super(TestGetPackageSetFromKoji, self).setUp()
-        self.compose = helpers.DummyCompose(
-            self.topdir, {"pkgset_koji_tag": "f25", "koji_cache": "/tmp"}
-        )
+        self.compose = helpers.DummyCompose(self.topdir, {"pkgset_koji_tag": "f25"})
         self.compose.koji_event = None
         self.koji_wrapper = mock.Mock()
         self.koji_wrapper.koji_proxy.getLastEvent.return_value = EVENT_INFO

@@ -193,7 +185,9 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
 
     @mock.patch("pungi.phases.pkgset.sources.source_koji.populate_global_pkgset")
     def test_get_package_sets(self, pgp):
-        pkgsets = source_koji.get_pkgset_from_koji(self.compose, self.koji_wrapper)
+        pkgsets = source_koji.get_pkgset_from_koji(
+            self.compose, self.koji_wrapper, "/prefix"
+        )
 
         six.assertCountEqual(
             self, self.koji_wrapper.koji_proxy.mock_calls, [mock.call.getLastEvent()]

@@ -202,7 +196,7 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
 
         self.assertEqual(
             pgp.call_args_list,
-            [mock.call(self.compose, self.koji_wrapper, EVENT_INFO)],
+            [mock.call(self.compose, self.koji_wrapper, "/prefix", EVENT_INFO)],
         )
 
     def test_get_koji_modules(self):
@@ -452,16 +446,14 @@ class TestSourceKoji(helpers.PungiTestCase):
     @mock.patch("pungi.phases.pkgset.sources.source_koji.get_pkgset_from_koji")
     @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
     def test_run(self, KojiWrapper, gpfk):
-        compose = helpers.DummyCompose(
-            self.topdir, {"koji_profile": "koji", "koji_cache": "/prefix"}
-        )
+        compose = helpers.DummyCompose(self.topdir, {"koji_profile": "koji"})
         KojiWrapper.return_value.koji_module.config.topdir = "/prefix"
 
         phase = source_koji.PkgsetSourceKoji(compose)
         pkgsets, path_prefix = phase()
 
         self.assertEqual(pkgsets, gpfk.return_value)
-        self.assertEqual(path_prefix, "/prefix")
+        self.assertEqual(path_prefix, "/prefix/")
         self.assertEqual(KojiWrapper.mock_calls, [mock.call(compose)])
 
 
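The changed assertion captures a real behavioral difference: on the 4.3.7 branch the phase derives path_prefix from the Koji profile's topdir and guarantees a trailing slash, while master returns the configured koji_cache value verbatim. A one-line sketch of the normalization the "/prefix/" assertion implies (assumed, not copied from the source):

    path_prefix = topdir.rstrip("/") + "/"  # "/prefix" -> "/prefix/"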
@@ -693,7 +685,6 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
     def setUp(self):
         super(TestAddModuleToVariant, self).setUp()
         self.koji = mock.Mock()
-        self.compose = helpers.DummyCompose(self.topdir, {})
         self.koji.koji_module.pathinfo.typedir.return_value = MMDS_DIR
         files = ["modulemd.x86_64.txt", "modulemd.armv7hl.txt", "modulemd.txt"]
         self.koji.koji_proxy.listArchives.return_value = [

@@ -716,9 +707,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
     def test_adding_module(self):
         variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])
 
-        source_koji._add_module_to_variant(
-            self.koji, variant, self.buildinfo, compose=self.compose
-        )
+        source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
 
         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
         self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")

@@ -740,9 +729,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
             modules=[{"name": "m1:latest-20190101:cafe", "glob": False}],
         )
 
-        source_koji._add_module_to_variant(
-            self.koji, variant, self.buildinfo, compose=self.compose
-        )
+        source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
 
         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
         self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")

@@ -759,11 +746,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
         variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])
 
         source_koji._add_module_to_variant(
-            self.koji,
-            variant,
-            self.buildinfo,
-            compose=self.compose,
-            add_to_variant_modules=True,
+            self.koji, variant, self.buildinfo, add_to_variant_modules=True
         )
 
         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]

@@ -789,11 +772,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
         )
 
         source_koji._add_module_to_variant(
-            self.koji,
-            variant,
-            self.buildinfo,
-            compose=self.compose,
-            add_to_variant_modules=True,
+            self.koji, variant, self.buildinfo, add_to_variant_modules=True
         )
 
         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]

@@ -839,7 +818,6 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
         super(TestAddModuleToVariantForKojiMock, self).setUp()
         self.koji = mock.Mock()
         self.koji.koji_module.pathinfo.typedir.return_value = "/koji"
-        self.compose = helpers.DummyCompose(self.topdir, {})
         self.koji.koji_module.pathinfo.topdir = MMDS_DIR
         files = [
             "modulemd.x86_64.txt",

@@ -898,10 +876,7 @@ class TestAddModuleToVariantForKojiMock(helpers.PungiTestCase):
             modules=[{"name": "m1:latest-20190101:cafe", "glob": False}],
         )
 
-
-        source_koji._add_module_to_variant(
-            self.koji, variant, self.buildinfo, compose=self.compose
-        )
+        source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
 
         mod = variant.arch_mmds["x86_64"]["m1:latest:20190101:cafe"]
         self.assertEqual(mod.get_NSVCA(), "m1:latest:20190101:cafe:x86_64")
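Every hunk in this file removes the same keyword argument: master's _add_module_to_variant accepts compose= (and both setUp methods build a DummyCompose to pass in), while the helper on the 4.3.7 branch has no such parameter. Side by side, as exercised by the tests above:

    # master
    source_koji._add_module_to_variant(self.koji, variant, self.buildinfo, compose=self.compose)

    # 4.3.7 branch
    source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)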
@@ -13,10 +13,8 @@ import random
 import os
 import six
 
-from parameterized import parameterized
-
 from pungi.wrappers import scm
-from tests.helpers import touch, GIT_WITH_CREDS
+from tests.helpers import touch
 from kobo.shortcuts import run
 
 

@@ -111,49 +109,37 @@ class FileSCMTestCase(SCMBaseTest):
         self.assertIn("No directories matched", str(ctx.exception))
 
 
-CREDENTIALS_CONFIG = {"credential_helper": "!ch"}
-
-
 class GitSCMTestCase(SCMBaseTest):
     def tearDown(self):
         shutil.rmtree("/tmp/pungi-temp-git-repos-%s" % os.getpid())
         super(GitSCMTestCase, self).tearDown()
 
-    def assertCalls(self, mock_run, url, branch, command=None, with_creds=False):
-        git = GIT_WITH_CREDS if with_creds else ["git"]
+    def assertCalls(self, mock_run, url, branch, command=None):
         command = [command] if command else []
         self.assertEqual(
             [call[0][0] for call in mock_run.call_args_list],
             [
                 ["git", "init"],
-                git + ["fetch", "--depth=1", url, branch],
+                ["git", "fetch", "--depth=1", url, branch],
                 ["git", "checkout", "FETCH_HEAD"],
             ]
             + command,
         )
 
-    @parameterized.expand([("without_creds", {}), ("with_creds", CREDENTIALS_CONFIG)])
-    def test_get_file(self, _name, config):
+    @mock.patch("pungi.wrappers.scm.run")
+    def test_get_file(self, run):
         def process(cmd, workdir=None, **kwargs):
             touch(os.path.join(workdir, "some_file.txt"))
             touch(os.path.join(workdir, "other_file.txt"))
 
-        with mock.patch("pungi.wrappers.scm.run") as run:
-            run.side_effect = process
-            retval = scm.get_file_from_scm(
-                {
-                    "scm": "git",
-                    "repo": "git://example.com/git/repo.git",
-                    "file": "some_file.txt",
-                    "options": config,
-                },
-                self.destdir,
-            )
+        run.side_effect = process
 
-        self.assertStructure(retval, ["some_file.txt"])
-        self.assertCalls(
-            run, "git://example.com/git/repo.git", "master", with_creds=bool(config)
+        retval = scm.get_file_from_scm(
+            {
+                "scm": "git",
+                "repo": "git://example.com/git/repo.git",
+                "file": "some_file.txt",
+            },
+            self.destdir,
         )
+        self.assertStructure(retval, ["some_file.txt"])
+        self.assertCalls(run, "git://example.com/git/repo.git", "master")
 
     @mock.patch("pungi.wrappers.scm.run")
     def test_get_file_function(self, run):
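The deleted CREDENTIALS_CONFIG constant and the parameterized without_creds/with_creds cases are master's test coverage for git credential-helper support in the SCM wrapper. GIT_WITH_CREDS itself is defined in master's tests/helpers.py and never shown in this diff; a hedged sketch of what it plausibly expands to, based on git's standard per-invocation -c override (the exact option list is an assumption):

    # Assumed shape -- the real constant lives in tests/helpers.py on master.
    GIT_WITH_CREDS = ["git", "-c", "credential.helper=!ch"]

    # assertCalls(..., with_creds=True) would then expect a fetch of the form:
    cmd = GIT_WITH_CREDS + ["fetch", "--depth=1", "git://example.com/git/repo.git", "master"]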
@@ -177,10 +163,9 @@ class GitSCMTestCase(SCMBaseTest):
         self.assertEqual(retval, destination)
         self.assertCalls(run, "git://example.com/git/repo.git", "master")
 
-    @parameterized.expand([("without_creds", {}), ("with_creds", CREDENTIALS_CONFIG)])
-    def test_get_file_fetch_fails(self, _name, config):
+    @mock.patch("pungi.wrappers.scm.run")
+    def test_get_file_fetch_fails(self, run):
         url = "git://example.com/git/repo.git"
-        git = GIT_WITH_CREDS if config else ["git"]
 
         def process(cmd, workdir=None, **kwargs):
             if "fetch" in cmd:

@@ -190,20 +175,18 @@ class GitSCMTestCase(SCMBaseTest):
             touch(os.path.join(workdir, "some_file.txt"))
             touch(os.path.join(workdir, "other_file.txt"))
 
-        with mock.patch("pungi.wrappers.scm.run") as run:
-            run.side_effect = process
-            retval = scm.get_file_from_scm(
-                {"scm": "git", "repo": url, "file": "some_file.txt", "options": config},
-                self.destdir,
-            )
+        run.side_effect = process
+
+        retval = scm.get_file_from_scm(
+            {"scm": "git", "repo": url, "file": "some_file.txt"}, self.destdir
+        )
         self.assertStructure(retval, ["some_file.txt"])
         self.assertEqual(
             [call[0][0] for call in run.call_args_list],
             [
                 ["git", "init"],
-                git
-                + [
+                [
+                    "git",
                     "fetch",
                     "--depth=1",
                     "git://example.com/git/repo.git",

@@ -211,7 +194,7 @@ class GitSCMTestCase(SCMBaseTest):
                 ],
                 ["git", "init"],
                 ["git", "remote", "add", "origin", url],
-                git + ["remote", "update", "origin"],
+                ["git", "remote", "update", "origin"],
                 ["git", "checkout", "master"],
             ],
         )

@@ -260,28 +243,20 @@ class GitSCMTestCase(SCMBaseTest):
 
         self.assertEqual(str(ctx.exception), "'make' failed with exit code 1")
 
-    @parameterized.expand([("without_creds", {}), ("with_creds", CREDENTIALS_CONFIG)])
-    def test_get_dir(self, _name, config):
+    @mock.patch("pungi.wrappers.scm.run")
+    def test_get_dir(self, run):
         def process(cmd, workdir=None, **kwargs):
             touch(os.path.join(workdir, "subdir", "first"))
             touch(os.path.join(workdir, "subdir", "second"))
 
-        with mock.patch("pungi.wrappers.scm.run") as run:
-            run.side_effect = process
-            retval = scm.get_dir_from_scm(
-                {
-                    "scm": "git",
-                    "repo": "git://example.com/git/repo.git",
-                    "dir": "subdir",
-                    "options": config,
-                },
-                self.destdir,
-            )
+        run.side_effect = process
 
-        self.assertStructure(retval, ["first", "second"])
-        self.assertCalls(
-            run, "git://example.com/git/repo.git", "master", with_creds=bool(config)
+        retval = scm.get_dir_from_scm(
+            {"scm": "git", "repo": "git://example.com/git/repo.git", "dir": "subdir"},
+            self.destdir,
         )
+        self.assertStructure(retval, ["first", "second"])
+        self.assertCalls(run, "git://example.com/git/repo.git", "master")
 
     @mock.patch("pungi.wrappers.scm.run")
     def test_get_dir_and_generate(self, run):
@@ -16,7 +16,7 @@ import six
 from pungi import compose
 from pungi import util
 
-from tests.helpers import touch, PungiTestCase, mk_boom, GIT_WITH_CREDS
+from tests.helpers import touch, PungiTestCase, mk_boom
 
 
 class TestGitRefResolver(unittest.TestCase):

@@ -32,20 +32,6 @@ class TestGitRefResolver(unittest.TestCase):
             universal_newlines=True,
         )
 
-    @mock.patch("pungi.util.run")
-    def test_successful_resolve_with_credentials(self, run):
-        run.return_value = (0, "CAFEBABE\tHEAD\n")
-
-        url = util.resolve_git_url(
-            "https://git.example.com/repo.git?somedir#HEAD", "!ch"
-        )
-
-        self.assertEqual(url, "https://git.example.com/repo.git?somedir#CAFEBABE")
-        run.assert_called_once_with(
-            GIT_WITH_CREDS + ["ls-remote", "https://git.example.com/repo.git", "HEAD"],
-            universal_newlines=True,
-        )
-
     @mock.patch("pungi.util.run")
     def test_successful_resolve_branch(self, run):
         run.return_value = (0, "CAFEBABE\trefs/heads/f24\n")
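The removed test_successful_resolve_with_credentials documents master's resolve_git_url(url, credential_helper) flow: git ls-remote is invoked with the credential helper injected, and the #HEAD fragment in the URL is rewritten to the commit hash it reports. A simplified sketch of just the ref-resolution step, under the same assumed helper-injection shape as above (the real code lives in pungi/util.py on master and may differ):

    from kobo.shortcuts import run

    # Simplified sketch, not Pungi's implementation.
    def resolve_git_ref(repo, ref="HEAD", credential_helper=None):
        git = ["git"]
        if credential_helper:
            git += ["-c", "credential.helper=%s" % credential_helper]  # assumed
        _, output = run(git + ["ls-remote", repo, ref], universal_newlines=True)
        return output.split()[0]  # "CAFEBABE\tHEAD\n" -> "CAFEBABE"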
@@ -225,12 +211,11 @@ class TestGitRefResolver(unittest.TestCase):
         self.assertEqual(resolver(url2), "2")
         self.assertEqual(resolver(url3, ref2), "beef")
         self.assertEqual(
-            mock_resolve_url.call_args_list,
-            [mock.call(url1, None), mock.call(url2, None)],
+            mock_resolve_url.call_args_list, [mock.call(url1), mock.call(url2)]
         )
         self.assertEqual(
             mock_resolve_ref.call_args_list,
-            [mock.call(url3, ref1, None), mock.call(url3, ref2, None)],
+            [mock.call(url3, ref1), mock.call(url3, ref2)],
         )
 
     @mock.patch("pungi.util.resolve_git_url")

@@ -242,7 +227,7 @@ class TestGitRefResolver(unittest.TestCase):
             resolver(url)
         with self.assertRaises(util.GitUrlResolveError):
             resolver(url)
-        self.assertEqual(mock_resolve.call_args_list, [mock.call(url, None)])
+        self.assertEqual(mock_resolve.call_args_list, [mock.call(url)])
 
 
 class TestGetVariantData(unittest.TestCase):