Compare commits

66 commits: master ... centos_mas
SHA1:
aa7fcc1c20 b32c8f3e5e 935da7c246 b513c8cd00 8cf1d98312 2534ddee99
f30a8b4d15 3ffb991bac dbc0e531b2 4c7611291d 0d3cd150bd aa0aae3d3e
77f8fa25ad e6d9f31ef4 bf3e9bc53a 631bb01d8f b6296bdfcd 1c4275bbfa
fe2dad3b3c 7128021654 bd64894a03 14e025a5a1 ada8f4e346 e4c525ecbf
091d228219 bcc440491e fa50eedfad b7adbf8a91 82ae9e86d5 2ad341a01c
e888e76992 6e72de7efe c8263fcd39 82ca4f4e65 b8b6b46ce7 e9d836c115
d3f0701e01 8f6f0f463f 467c7a7f6a e1d7544c2b a71c8e23be ab508c1511
f960b4d155 602b698080 b30f7e0d83 0c3b6e22f9 3175ede38a 8920eef339
58036eab84 a4476f2570 8c06b7a3f1 64ae81b416 826169af7c d97b8bdd33
8768b23cbe 51628a974d 88327d5784 6e0a9385f2 8be0d84f8a 8f0906be53
e3072c3d5f ef6d40dce4 df6664098d 147df93f75 dd8c1002d4 12e3a46390
@@ -2,6 +2,7 @@ include AUTHORS
 include COPYING
 include GPL
 include pungi.spec
+include setup.cfg
 include tox.ini
 include share/*
 include share/multilib/*
TODO (1 changed line)
@@ -47,7 +47,6 @@ Split Pungi into smaller well-defined tools

 * create install images
     * lorax
-    * buildinstall

 * create isos
     * isos
contrib/tmpfiles.d/pungi-clean-cache.conf (new file, 2 lines)
@@ -0,0 +1,2 @@
+# Clean up pungi cache
+d /var/cache/pungi/createrepo_c/ - - - 30d
doc/conf.py (142 changed lines)
@@ -18,12 +18,12 @@ import os
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
+# sys.path.insert(0, os.path.abspath('.'))

 # -- General configuration ------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -31,207 +31,201 @@ import os
 extensions = []

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"

 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = u'Pungi'
-copyright = u'2016, Red Hat, Inc.'
+project = "Pungi"
+copyright = "2016, Red Hat, Inc."

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = '4.3'
+version = "4.5"
 # The full version, including alpha/beta/rc tags.
-release = '4.3.6'
+release = "4.5.1"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-#language = None
+# language = None

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = ["_build"]

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-#default_role = None
+# default_role = None

 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []

 # If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
+# keep_warnings = False


 # -- Options for HTML output ----------------------------------------------

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'default'
+html_theme = "default"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None

 # A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-#html_logo = None
+# html_logo = None

 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-#html_favicon = None
+# html_favicon = None

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+# html_extra_path = []

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True

 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}

 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True

 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True

 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False

 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None

 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Pungidoc'
+htmlhelp_basename = "Pungidoc"


 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    ('index', 'Pungi.tex', u'Pungi Documentation',
-     u'Daniel Mach', 'manual'),
+    ("index", "Pungi.tex", "Pungi Documentation", "Daniel Mach", "manual"),
 ]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-#latex_logo = None
+# latex_logo = None

 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-#latex_use_parts = False
+# latex_use_parts = False

 # If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False

 # If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False

 # Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []

 # If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True


 # -- Options for manual page output ---------------------------------------

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'pungi', u'Pungi Documentation',
-     [u'Daniel Mach'], 1)
-]
+man_pages = [("index", "pungi", "Pungi Documentation", ["Daniel Mach"], 1)]

 # If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False


 # -- Options for Texinfo output -------------------------------------------
@@ -240,19 +234,25 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    ('index', 'Pungi', u'Pungi Documentation',
-     u'Daniel Mach', 'Pungi', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        "index",
+        "Pungi",
+        "Pungi Documentation",
+        "Daniel Mach",
+        "Pungi",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]

 # Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []

 # If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'

 # If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
+# texinfo_no_detailmenu = False
doc/configuration.rst
@@ -194,6 +194,17 @@ Options
     Tracking Service Kerberos authentication. If not defined, the default
     Kerberos principal is used.

+**cts_oidc_token_url**
+    (*str*) -- URL to the OIDC token endpoint.
+    For example ``https://oidc.example.com/openid-connect/token``.
+    This option can be overridden by the environment variable ``CTS_OIDC_TOKEN_URL``.
+
+**cts_oidc_client_id**
+    (*str*) -- OIDC client ID.
+    This option can be overridden by the environment variable ``CTS_OIDC_CLIENT_ID``.
+    Note that the environment variable ``CTS_OIDC_CLIENT_SECRET`` must be configured
+    with the corresponding client secret to authenticate to CTS via OIDC.
+
 **compose_type**
     (*str*) -- Allows to set default compose type. Type set via a command-line
     option overwrites this.
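The OIDC options above correspond to a standard client-credentials token
request. A minimal sketch of such a request, assuming the placeholder endpoint
from the example and the documented environment variables (this is not Pungi's
exact code)::

    import os
    import requests

    resp = requests.post(
        "https://oidc.example.com/openid-connect/token",  # cts_oidc_token_url
        data={
            "grant_type": "client_credentials",
            "client_id": os.environ.get("CTS_OIDC_CLIENT_ID", ""),
            "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
        },
    )
    resp.raise_for_status()
    # The access token is then sent as "Authorization: Bearer <token>".
    token = resp.json()["access_token"]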
@@ -581,6 +592,16 @@ Options
     with everything. Set this option to ``False`` to ignore ``noarch`` in
     ``ExclusiveArch`` and always consider only binary architectures.

+**pkgset_inherit_exclusive_arch_to_noarch** = True
+    (*bool*) -- When set to ``True``, the value of ``ExclusiveArch`` or
+    ``ExcludeArch`` will be copied from the source rpm to all its noarch
+    packages. That will then limit which architectures the noarch packages
+    can be included in.
+
+    By setting this option to ``False`` this step is skipped, and noarch
+    packages will by default land in all architectures. They can still be
+    excluded by listing them in a relevant section of ``filter_packages``.
+
 **pkgset_allow_reuse** = True
     (*bool*) -- When set to ``True``, *Pungi* will try to reuse pkgset data
     from the old composes specified by ``--old-composes``. When enabled, this
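For illustration, disabling the inheritance in a Pungi configuration file could
look like this (the variant and package names are hypothetical)::

    pkgset_inherit_exclusive_arch_to_noarch = False
    # noarch packages now land everywhere; filter the unwanted ones explicitly.
    filter_packages = [
        ("^Server$", {"s390x": ["some-noarch-package"]}),
    ]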
@@ -621,7 +642,7 @@ Options
 -------

 **buildinstall_method**
-    (*str*) -- "lorax" (f16+, rhel7+) or "buildinstall" (older releases)
+    (*str*) -- "lorax" (f16+, rhel7+)
 **lorax_options**
     (*list*) -- special options passed on to *lorax*.

@@ -920,6 +941,10 @@ Options
     comps file can not be found in the package set. When disabled (the
     default), such cases are still reported as warnings in the log.

+    With the ``dnf`` gather backend, this option will abort the compose on any
+    missing package no matter if it's listed in comps, ``additional_packages``
+    or the prepopulate file.
+
 **gather_source_mapping**
     (*str*) -- JSON mapping with initial packages for the compose. The value
     should be a path to a JSON file with the following mapping: ``{variant: {arch:
@@ -1607,11 +1632,28 @@ OSBuild Composer for building images
     * ``release`` -- release part of the final NVR. If neither this option nor
       the global ``osbuild_release`` is set, Koji will automatically generate a
       value.
-    * ``repo`` -- a list of repository URLs from which to consume packages for
+    * ``repo`` -- a list of repositories from which to consume packages for
       building the image. By default only the variant repository is used.
+      The list items may use one of the following formats:
+
+      * String with just the repository URL.
+
+      * Dictionary with the following keys:
+
+        * ``baseurl`` -- URL of the repository.
+        * ``package_sets`` -- a list of package set names to use for this
+          repository. Package sets are an internal concept of Image Builder
+          and are used in image definitions. If specified, the repository is
+          used by Image Builder only for the pipeline with the same name.
+          For example, specifying the ``build`` package set name will make
+          the repository be used only for the build environment in which
+          the image will be built. (optional)
+
     * ``arches`` -- list of architectures for which to build the image. By
       default, the variant arches are used. This option can only restrict it,
       not add a new one.
+    * ``manifest_type`` -- the image type that is put into the manifest by
+      pungi. If not supplied then it is autodetected from the Koji output.
     * ``ostree_url`` -- URL of the repository that's used to fetch the parent
       commit from.
     * ``ostree_ref`` -- name of the ostree branch
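A sketch of how the two ``repo`` item formats might be combined in an
``osbuild`` config block (the image definition, names and URLs here are
placeholders, not from the source)::

    osbuild = {
        "^Server$": [
            {
                "name": "server-image",          # hypothetical image name
                "distro": "rhel-9",
                "image_types": ["qcow2"],
                "repo": [
                    # Plain string: repository used for all pipelines.
                    "https://example.com/repo/os/$arch/",
                    # Dict form: used only for the "build" package set.
                    {
                        "baseurl": "https://example.com/repo/build/$arch/",
                        "package_sets": ["build"],
                    },
                ],
            }
        ]
    }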
@@ -1641,13 +1683,13 @@ OSBuild Composer for building images
     * ``tenant_id`` -- Azure tenant ID to upload the image to
     * ``subscription_id`` -- Azure subscription ID to upload the image to
     * ``resource_group`` -- Azure resource group to upload the image to
-    * ``location`` -- Azure location to upload the image to
+    * ``location`` -- Azure location of the resource group (optional)
     * ``image_name`` -- Image name of the uploaded Azure image (optional)

   * **GCP upload options** -- upload to Google Cloud Platform.

     * ``region`` -- GCP region to upload the image to
-    * ``bucket`` -- GCP bucket to upload the image to
+    * ``bucket`` -- GCP bucket to upload the image to (optional)
     * ``share_with_accounts`` -- list of GCP accounts to share the image
       with
     * ``image_name`` -- Image name of the uploaded GCP image (optional)
@@ -1764,6 +1806,8 @@ repository with a new commit.
 * ``tag_ref`` -- (*bool*, default ``True``) If set to ``False``, a git
   reference will not be created.
 * ``ostree_ref`` -- (*str*) To override value ``ref`` from ``treefile``.
+* ``runroot_packages`` -- (*list*) A list of additional package names to be
+  installed in the runroot environment in Koji.

 Example config
 --------------
doc/index.rst
@@ -19,7 +19,7 @@ Contents:
    scm_support
    messaging
    gathering
+   koji
    comps
    contributing
    testing
-   multi_compose
doc/koji.rst (new file, 105 lines)
@@ -0,0 +1,105 @@
+======================
+Getting data from koji
+======================
+
+When Pungi is configured to get packages from a Koji tag, it somehow needs to
+access the actual RPM files.
+
+Historically, this required the storage used by Koji to be directly available
+on the host where Pungi was running. This was usually achieved by using NFS for
+the Koji volume, and mounting it on the compose host.
+
+The compose could be created directly on the same volume. In such case the
+packages would be hardlinked, significantly reducing space consumption.
+
+The compose could also be created on a different storage, in which case the
+packages would either need to be copied over or symlinked. Using symlinks
+requires that anything that accesses the compose (e.g. a download server) would
+also need to mount the Koji volume in the same location.
+
+There is also a risk with symlinks that the package in Koji can change (due to
+being resigned for example), which would invalidate composes linking to it.
+
+
+Using Koji without direct mount
+===============================
+
+It is possible now to run a compose from a Koji tag without direct access to
+Koji storage.
+
+Pungi can download the packages over HTTP protocol, store them in a local
+cache, and consume them from there.
+
+The local cache has similar structure to what is on the Koji volume.
+
+When Pungi needs some package, it has a path on the Koji volume. It will
+replace the ``topdir`` with the cache location. If such a file exists, it will
+be used. If it doesn't exist, it will be downloaded from Koji (by replacing the
+``topdir`` with ``topurl``).
+
+::
+
+    Koji path   /mnt/koji/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
+    Koji URL    https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
+    Local path  /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
+
+The packages can be hardlinked from this cache directory.
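A minimal sketch of that substitution, assuming the ``topdir``, ``topurl`` and
cache locations from the example above (the real code also handles locking and
integrity checks)::

    import os
    import urllib.request

    TOPDIR = "/mnt/koji"
    TOPURL = "https://kojipkgs.fedoraproject.org"
    CACHE = "/mnt/compose/cache"

    def get_local_path(koji_path):
        """Map a path on the Koji volume to the local cache, filling it on miss."""
        relative = os.path.relpath(koji_path, TOPDIR)
        local = os.path.join(CACHE, relative)
        if not os.path.exists(local):
            os.makedirs(os.path.dirname(local), exist_ok=True)
            urllib.request.urlretrieve(TOPURL + "/" + relative, local)
        return local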
+
+
+Cleanup
+-------
+
+While the approach above allows each RPM to be downloaded only once, it will
+eventually result in the Koji volume being mirrored locally. Most of the
+packages will however no longer be needed.
+
+There is a script ``pungi-cache-cleanup`` that can help with that. It can find
+and remove files from the cache that are no longer needed.
+
+A file is no longer needed if it has a single link (meaning it is only in the
+cache, not in any compose), and it has mtime older than a given threshold.
+
+It doesn't make sense to delete files that are hardlinked in an existing
+compose as it would not save any space anyway.
+
+The mtime check is meant to preserve files that are downloaded but not actually
+used in a compose, like a subpackage that is not included in any variant. Every
+time its existence in the local cache is checked, the mtime is updated.
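The eligibility test described above boils down to two ``stat`` fields; a
sketch, assuming a 30-day threshold (the real script's options may differ)::

    import os
    import time

    def is_unneeded(path, max_age_days=30):
        st = os.stat(path)
        # st_nlink == 1: the file exists only in the cache, no compose links it.
        # st_mtime is refreshed on every cache hit, so an old mtime means unused.
        return st.st_nlink == 1 and st.st_mtime < time.time() - max_age_days * 86400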
+
+
+Race conditions?
+----------------
+
+It should be safe to have multiple compose hosts share the same storage volume
+for generated composes and local cache.
+
+If a cache file is accessed and it exists, there's no risk of race condition.
+
+If two composes need the same file at the same time and it is not present yet,
+one of them will take a lock on it and start downloading. The other will wait
+until the download is finished.
+
+The lock is only valid for a set amount of time (5 minutes) to avoid issues
+where the downloading process is killed in a way that blocks it from releasing
+the lock.
+
+If the file is large and the network slow, the limit may not be enough to
+finish downloading. In that case the second process will steal the lock while
+the first process is still downloading. This will result in the same file being
+downloaded twice.
+
+When the first process finishes the download, it will put the file into the
+local cache location. When the second process finishes, it will atomically
+replace it, but since it is the same file, nothing changes.
+
+If the first compose already managed to hardlink the file before it gets
+replaced, there will be two copies of the file present locally.
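One way the timed lock could work, as a sketch (Pungi's actual locking
implementation may differ)::

    import os
    import time

    LOCK_TIMEOUT = 5 * 60  # five minutes, per the text above

    def try_lock(lock_path):
        try:
            # Steal the lock if its owner looks dead (older than the timeout).
            if time.time() - os.stat(lock_path).st_mtime > LOCK_TIMEOUT:
                os.unlink(lock_path)
        except FileNotFoundError:
            pass
        try:
            # O_EXCL makes creation atomic: exactly one process can win.
            os.close(os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY))
            return True
        except FileExistsError:
            return False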
+
+
+Integrity checking
+------------------
+
+There is minimal integrity checking. RPM packages belonging to real builds will
+be checked to match the checksum provided by Koji hub.
+
+There is no checking for scratch builds or any images.
doc/multi_compose.rst (deleted)
@@ -1,107 +0,0 @@
-.. _multi_compose:
-
-Managing compose from multiple parts
-====================================
-
-There may be cases where it makes sense to split a big compose into separate
-parts, but create a compose output that links all output into one familiar
-structure.
-
-The `pungi-orchestrate` tools allows that.
-
-It works with an INI-style configuration file. The ``[general]`` section
-contains information about identity of the main compose. Other sections define
-individual parts.
-
-The parts are scheduled to run in parallel, with the minimal amount of
-serialization. The final compose directory will contain hard-links to the
-files.
-
-
-General settings
-----------------
-
-**target**
-    Path to directory where the final compose should be created.
-**compose_type**
-    Type of compose to make.
-**release_name**
-    Name of the product for the final compose.
-**release_short**
-    Short name of the product for the final compose.
-**release_version**
-    Version of the product for the final compose.
-**release_type**
-    Type of the product for the final compose.
-**extra_args**
-    Additional arguments that will be passed to the child Pungi processes.
-**koji_profile**
-    If specified, a current event will be retrieved from the Koji instance and
-    used for all parts.
-
-**kerberos**
-    If set to yes, a kerberos ticket will be automatically created at the start.
-    Set keytab and principal as well.
-**kerberos_keytab**
-    Path to keytab file used to create the kerberos ticket.
-**kerberos_principal**
-    Kerberos principal for the ticket
-
-**pre_compose_script**
-    Commands to execute before first part is started. Can contain multiple
-    commands on separate lines.
-**post_compose_script**
-    Commands to execute after the last part finishes and final status is
-    updated. Can contain multiple commands on separate lines. ::
-
-        post_compose_script =
-            compose-latest-symlink $COMPOSE_PATH
-            custom-post-compose-script.sh
-
-    Multiple environment variables are defined for the scripts:
-
-    * ``COMPOSE_PATH``
-    * ``COMPOSE_ID``
-    * ``COMPOSE_DATE``
-    * ``COMPOSE_TYPE``
-    * ``COMPOSE_RESPIN``
-    * ``COMPOSE_LABEL``
-    * ``RELEASE_ID``
-    * ``RELEASE_NAME``
-    * ``RELEASE_SHORT``
-    * ``RELEASE_VERSION``
-    * ``RELEASE_TYPE``
-    * ``RELEASE_IS_LAYERED`` -- ``YES`` for layered products, empty otherwise
-    * ``BASE_PRODUCT_NAME`` -- only set for layered products
-    * ``BASE_PRODUCT_SHORT`` -- only set for layered products
-    * ``BASE_PRODUCT_VERSION`` -- only set for layered products
-    * ``BASE_PRODUCT_TYPE`` -- only set for layered products
-
-**notification_script**
-    Executable name (or path to a script) that will be used to send a message
-    once the compose is finished. In order for a valid URL to be included in the
-    message, at least one part must configure path translation that would apply
-    to location of main compose.
-
-    Only two messages will be sent, one for start and one for finish (either
-    successful or not).
-
-
-Partial compose settings
-------------------------
-
-Each part should have a separate section in the config file.
-
-It can specify these options:
-
-**config**
-    Path to configuration file that describes this part. If relative, it is
-    resolved relative to the file with parts configuration.
-**just_phase**, **skip_phase**
-    Customize which phases should run for this part.
-**depends_on**
-    A comma separated list of other parts that must be finished before this part
-    starts.
-**failable**
-    A boolean toggle to mark a part as failable. A failure in such part will
-    mark the final compose as incomplete, but still successful.
doc/phases.rst
@@ -30,17 +30,14 @@ packages to architectures.
 Buildinstall
 ------------

-Spawns a bunch of threads, each of which runs either ``lorax`` or
-``buildinstall`` command (the latter coming from ``anaconda`` package). The
+Spawns a bunch of threads, each of which runs the ``lorax`` command. The
 commands create ``boot.iso`` and other boot configuration files. The image is
 finally linked into the ``compose/`` directory as netinstall media.

 The created images are also needed for creating live media or other images in
 later phases.

-With ``lorax`` this phase runs one task per variant.arch combination. For
-``buildinstall`` command there is only one task per architecture and
-``product.img`` should be used to customize the results.
+With ``lorax`` this phase runs one task per variant.arch combination.

 Gather
 ------
doc/scm_support.rst
@@ -41,6 +41,14 @@ which can contain following keys.
 * ``command`` -- defines a shell command to run after Git clone to generate the
   needed file (for example to run ``make``). Only supported in Git backend.

+* ``options`` -- a dictionary of additional configuration options. These are
+  specific to different backends.
+
+  Currently supported values for Git:
+
+  * ``credential_helper`` -- path to a credential helper used to supply
+    username/password for remotes that require authentication.
+
 Koji examples
 -------------
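For illustration, an scm_dict using the new ``options`` key might look like
this (the repository URL and helper path are placeholders)::

    comps_file = {
        "scm": "git",
        "repo": "https://git.example.com/comps.git",
        "file": "comps-cloud.xml",
        "options": {
            "credential_helper": "/usr/local/bin/my-credential-helper",
        },
    }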
pungi.spec (76 changed lines)
@@ -1,5 +1,5 @@
 Name:           pungi
-Version:        4.3.6
+Version:        4.5.1
 Release:        1%{?dist}
 Summary:        Distribution compose tool

@@ -100,7 +100,6 @@ rm -rf %{buildroot}
 %{_bindir}/%{name}-config-validate
 %{_bindir}/%{name}-fedmsg-notification
 %{_bindir}/%{name}-notification-report-progress
-%{_bindir}/%{name}-orchestrate
 %{_bindir}/%{name}-patch-iso
 %{_bindir}/%{name}-compare-depsolving
 %{_bindir}/%{name}-wait-for-signed-ostree-handler

@@ -111,6 +110,79 @@ pytest
 cd tests && ./test_compose.sh

 %changelog
+* Thu Sep 7 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.5.1-1
+- gather_dnf.py: Do not raise error when the downloaded package exists.
+  (ounsal)
+
+* Tue Aug 29 2023 Aditya Bisoi <abisoi@redhat.com> - 4.5.0-1
+- kojiwrapper: Stop being smart about local access (lsedlar)
+- Fix unittest errors (ounsal)
+- Add integrity checking for builds (lsedlar)
+- Add script for cleaning up the cache (lsedlar)
+- Add ability to download images (lsedlar)
+- Add support for not having koji volume mounted locally (lsedlar)
+- Remove repository cloning multiple times (abisoi)
+- Support require_all_comps_packages on DNF backend (lsedlar)
+- Fix new warnings from flake8 (lsedlar)
+
+* Tue Jul 25 2023 Aditya Bisoi <abisoi@redhat.com> - 4.4.1-1
+- ostree: Add configuration for custom runroot packages (lsedlar)
+- pkgset: Emit better error for missing modulemd file (lsedlar)
+- Add support for git-credential-helper (lsedlar)
+- Support OIDC Client Credentials authentication to CTS (hlin)
+
+* Tue Jun 06 2023 Lubomír Sedlář <lsedlar@redhat.com> - 4.4.0-1
+- gather-dnf: Run latest() later (lsedlar)
+- iso: Support joliet long names (lsedlar)
+- Drop pungi-orchestrator code (lsedlar)
+- isos: Ensure proper file ownership and permissions (lsedlar)
+- gather: Always get latest packages (lsedlar)
+- Add back compatibility with jsonschema <3.0.0 (lsedlar)
+- Remove useless debug message (lsedlar)
+- Remove fedmsg from requirements (lsedlar)
+- gather: Support dotarch in DNF backend (lsedlar)
+- Set the priority in the fedora-messaging notifier (aurelien)
+- Fix compatibility with createrepo_c 0.21.1 (lsedlar)
+- comps: Apply arch filtering to environment/optionlist (lsedlar)
+- Add config file for cleaning up cache files (hlin)
+
+* Tue Mar 28 2023 Haibo Lin <hlin@redhat.com> - 4.3.8-1
+- createiso: Update possibly changed file on DVD (lsedlar)
+- pkgset: Stop reuse if configuration changed (lsedlar)
+- Allow disabling inheriting ExcludeArch to noarch packages (lsedlar)
+- pkgset: Support extra builds with no tags (lsedlar)
+- buildinstall: Avoid pointlessly tweaking the boot images (lsedlar)
+- Prevent to reuse if unsigned packages are allowed (hlin)
+- Pass parent id/respin id to CTS (lsedlar)
+- Exclude existing files in boot.iso (hlin)
+- image-build/osbuild: Pull ISOs into the compose (lsedlar)
+- Retry 401 error from CTS (lsedlar)
+- gather: Better detection of debuginfo in lookaside (lsedlar)
+- Log versions of all installed packages (hlin)
+- Use authentication for all CTS calls (lsedlar)
+- Fix black complaints (lsedlar)
+- Add vhd.gz extension to compressed VHD images (lsedlar)
+- Add vhd-compressed image type (lsedlar)
+- Update to work with latest mock (lsedlar)
+- Default bztar format for sdist command (onosek)
+
+* Fri Dec 09 2022 Ondřej Nosek <onosek@redhat.com>
+- osbuild: test passing of rich repos from configuration (lsedlar)
+- osbuild: support specifying `package_sets` for repos (thozza)
+- osbuild: don't use `util.get_repo_urls()` (thozza)
+- osbuild: update schema and config documentation (thozza)
+- Speed up tests by 30 seconds (lsedlar)
+- Stop sending compose paths to CTS (lsedlar)
+- Report errors from CTS (lsedlar)
+- createiso: Create Joliet tree with xorriso (lsedlar)
+- init: Filter comps for modular variants with tags (lsedlar)
+- Retry failed cts requests (hlin)
+- Ignore existing kerberos ticket for CTS auth (lsedlar)
+- osbuild: support specifying upload_options (thozza)
+- osbuild: accept only a single image type in the configuration (thozza)
+- Add Jenkinsfile for CI (hlin)
+- profiler: Flush stdout before printing (lsedlar)
+
 * Fri Aug 26 2022 Lubomír Sedlář <lsedlar@redhat.com> - 4.3.6-1
 - pkgset: Report better error when module is missing an arch (lsedlar)
 - osbuild: add support for building ostree artifacts (ondrej)
pungi/checks.py
@@ -227,9 +227,19 @@ def validate(config, offline=False, schema=None):
     DefaultValidator = _extend_with_default_and_alias(
         jsonschema.Draft4Validator, offline=offline
     )
-    validator = DefaultValidator(
-        schema,
-    )
+    if hasattr(jsonschema.Draft4Validator, "TYPE_CHECKER"):
+        # jsonschema >= 3.0 has new interface for checking types
+        validator = DefaultValidator(schema)
+    else:
+        validator = DefaultValidator(
+            schema,
+            {
+                "array": (tuple, list),
+                "regex": six.string_types,
+                "url": six.string_types,
+            },
+        )
     errors = []
     warnings = []
     for error in validator.iter_errors(config):
@@ -377,6 +387,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 instance[property]["branch"] = resolver(
                     instance[property]["repo"],
                     instance[property].get("branch") or "HEAD",
+                    instance[property].get("options"),
                 )

         for error in _hook_errors(properties, instance, schema):
@@ -444,15 +455,18 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 context=all_errors,
             )

-    def is_array(checker, instance):
-        return isinstance(instance, (tuple, list))
+    kwargs = {}
+    if hasattr(validator_class, "TYPE_CHECKER"):
+        # jsonschema >= 3
+        def is_array(checker, instance):
+            return isinstance(instance, (tuple, list))

-    def is_string_type(checker, instance):
-        return isinstance(instance, six.string_types)
+        def is_string_type(checker, instance):
+            return isinstance(instance, six.string_types)

-    type_checker = validator_class.TYPE_CHECKER.redefine_many(
-        {"array": is_array, "regex": is_string_type, "url": is_string_type}
-    )
+        kwargs["type_checker"] = validator_class.TYPE_CHECKER.redefine_many(
+            {"array": is_array, "regex": is_string_type, "url": is_string_type}
+        )

     return jsonschema.validators.extend(
         validator_class,
@@ -464,7 +478,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
             "additionalProperties": _validate_additional_properties,
             "anyOf": _validate_any_of,
         },
-        type_checker=type_checker,
+        **kwargs
     )


@@ -507,6 +521,13 @@ def make_schema():
             "file": {"type": "string"},
             "dir": {"type": "string"},
             "command": {"type": "string"},
+            "options": {
+                "type": "object",
+                "properties": {
+                    "credential_helper": {"type": "string"},
+                },
+                "additionalProperties": False,
+            },
         },
         "additionalProperties": False,
     },
@@ -551,7 +572,6 @@ def make_schema():
         },
         "required": ["kickstart"],
         "additionalProperties": False,
-        "type": "object",
     },
     "osbs_config": {
         "type": "object",
@@ -792,7 +812,7 @@ def make_schema():
         "buildinstall_allow_reuse": {"type": "boolean", "default": False},
         "buildinstall_method": {
             "type": "string",
-            "enum": ["lorax", "buildinstall"],
+            "enum": ["lorax"],
         },
         "buildinstall_topdir": {"type": "string"},
         "buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
@@ -811,8 +831,11 @@ def make_schema():
         "pdc_insecure": {"deprecated": "Koji is queried instead"},
         "cts_url": {"type": "string"},
         "cts_keytab": {"type": "string"},
+        "cts_oidc_token_url": {"type": "url"},
+        "cts_oidc_client_id": {"type": "string"},
         "koji_profile": {"type": "string"},
         "koji_event": {"type": "number"},
+        "koji_cache": {"type": "string"},
         "pkgset_koji_tag": {"$ref": "#/definitions/strings"},
         "pkgset_koji_builds": {"$ref": "#/definitions/strings"},
         "pkgset_koji_scratch_tasks": {"$ref": "#/definitions/strings"},
@@ -830,6 +853,10 @@ def make_schema():
             "type": "boolean",
             "default": True,
         },
+        "pkgset_inherit_exclusive_arch_to_noarch": {
+            "type": "boolean",
+            "default": True,
+        },
         "pkgset_scratch_modules": {
             "type": "object",
             "patternProperties": {
@@ -1032,6 +1059,9 @@ def make_schema():
                 "config_branch": {"type": "string"},
                 "tag_ref": {"type": "boolean"},
                 "ostree_ref": {"type": "string"},
+                "runroot_packages": {
+                    "$ref": "#/definitions/list_of_strings",
+                },
             },
             "required": [
                 "treefile",
@@ -1188,14 +1218,37 @@ def make_schema():
                     },
                     "arches": {"$ref": "#/definitions/list_of_strings"},
                     "release": {"type": "string"},
-                    "repo": {"$ref": "#/definitions/list_of_strings"},
+                    "repo": {
+                        "type": "array",
+                        "items": {
+                            "oneOf": [
+                                {
+                                    "type": "object",
+                                    "additionalProperties": False,
+                                    "required": ["baseurl"],
+                                    "properties": {
+                                        "baseurl": {"type": "string"},
+                                        "package_sets": {
+                                            "type": "array",
+                                            "items": {"type": "string"},
+                                        },
+                                    },
+                                },
+                                {"type": "string"},
+                            ]
+                        },
+                    },
                     "failable": {"$ref": "#/definitions/list_of_strings"},
                     "subvariant": {"type": "string"},
                     "ostree_url": {"type": "string"},
                     "ostree_ref": {"type": "string"},
                     "ostree_parent": {"type": "string"},
+                    "manifest_type": {"type": "string"},
                     "upload_options": {
-                        "oneOf": [
+                        # this should be really 'oneOf', but the minimal
+                        # required properties in AWSEC2 and GCP options
+                        # overlap.
+                        "anyOf": [
                             # AWSEC2UploadOptions
                             {
                                 "type": "object",
@@ -1234,7 +1287,6 @@ def make_schema():
                                     "tenant_id",
                                     "subscription_id",
                                     "resource_group",
-                                    "location",
                                 ],
                                 "properties": {
                                     "tenant_id": {"type": "string"},
@@ -1250,7 +1302,7 @@ def make_schema():
                             {
                                 "type": "object",
                                 "additionalProperties": False,
-                                "required": ["region", "bucket"],
+                                "required": ["region"],
                                 "properties": {
                                     "region": {"type": "string"},
                                     "bucket": {"type": "string"},
@@ -1445,7 +1497,6 @@ def get_num_cpus():
 CONFIG_DEPS = {
     "buildinstall_method": {
         "conflicts": (
-            (lambda val: val == "buildinstall", ["lorax_options"]),
             (lambda val: not val, ["lorax_options", "buildinstall_kickstart"]),
         ),
     },
170
pungi/compose.py
170
pungi/compose.py
@ -17,6 +17,7 @@
|
|||||||
__all__ = ("Compose",)
|
__all__ = ("Compose",)
|
||||||
|
|
||||||
|
|
||||||
|
import contextlib
|
||||||
import errno
|
import errno
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
@ -38,6 +39,7 @@ from dogpile.cache import make_region
|
|||||||
from pungi.graph import SimpleAcyclicOrientedGraph
|
from pungi.graph import SimpleAcyclicOrientedGraph
|
||||||
from pungi.wrappers.variants import VariantsXmlParser
|
from pungi.wrappers.variants import VariantsXmlParser
|
||||||
from pungi.paths import Paths
|
from pungi.paths import Paths
|
||||||
|
from pungi.wrappers.kojiwrapper import KojiDownloadProxy
|
||||||
from pungi.wrappers.scm import get_file_from_scm
|
from pungi.wrappers.scm import get_file_from_scm
|
||||||
from pungi.util import (
|
from pungi.util import (
|
||||||
makedirs,
|
makedirs,
|
||||||
@@ -57,14 +59,101 @@ except ImportError:
 SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]


+def is_status_fatal(status_code):
+    """Check if a status code returned from CTS reports an error that is unlikely
+    to be fixed by retrying. Generally client errors (4XX) are fatal, with the
+    exception of 401 Unauthorized, which could be caused by a transient network
+    issue between compose host and KDC.
+    """
+    if status_code == 401:
+        return False
+    return status_code >= 400 and status_code < 500
+
+
 @retry(wait_on=RequestException)
-def retry_request(method, url, data=None, auth=None):
+def retry_request(method, url, data=None, json_data=None, auth=None):
+    """
+    :param str method: Request method.
+    :param str url: Target URL.
+    :param dict data: form-urlencoded data to send in the body of the request.
+    :param dict json_data: json data to send in the body of the request.
+    """
     request_method = getattr(requests, method)
-    rv = request_method(url, json=data, auth=auth)
+    rv = request_method(url, data=data, json=json_data, auth=auth)
+    if is_status_fatal(rv.status_code):
+        try:
+            error = rv.json()
+        except ValueError:
+            error = rv.text
+        raise RuntimeError("%s responded with %d: %s" % (url, rv.status_code, error))
     rv.raise_for_status()
     return rv
+
+
+class BearerAuth(requests.auth.AuthBase):
+    def __init__(self, token):
+        self.token = token
+
+    def __call__(self, r):
+        r.headers["authorization"] = "Bearer " + self.token
+        return r
+
+
+@contextlib.contextmanager
+def cts_auth(pungi_conf):
+    """
+    :param dict pungi_conf: dict obj of pungi.json config.
+    """
+    auth = None
+    token = None
+    cts_keytab = pungi_conf.get("cts_keytab")
+    cts_oidc_token_url = os.environ.get("CTS_OIDC_TOKEN_URL", "") or pungi_conf.get(
+        "cts_oidc_token_url"
+    )
+
+    try:
+        if cts_keytab:
+            # requests-kerberos cannot accept a custom keytab, so we need to
+            # use an environment variable for this. But we need to change the
+            # environment only temporarily, just for this single requests.post.
+            # So first back up the current environment and revert to it
+            # after the requests call.
+            from requests_kerberos import HTTPKerberosAuth
+
+            auth = HTTPKerberosAuth()
+            environ_copy = dict(os.environ)
+            if "$HOSTNAME" in cts_keytab:
+                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
+            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
+            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
+        elif cts_oidc_token_url:
+            cts_oidc_client_id = os.environ.get(
+                "CTS_OIDC_CLIENT_ID", ""
+            ) or pungi_conf.get("cts_oidc_client_id", "")
+            token = retry_request(
+                "post",
+                cts_oidc_token_url,
+                data={
+                    "grant_type": "client_credentials",
+                    "client_id": cts_oidc_client_id,
+                    "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
+                },
+            ).json()["access_token"]
+            auth = BearerAuth(token)
+            del token
+
+        yield auth
+    except Exception as e:
+        # Avoid leaking the client secret in the traceback
+        e.show_locals = False
+        raise e
+    finally:
+        if cts_keytab:
+            shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
+            os.environ.clear()
+            os.environ.update(environ_copy)
+
+
 def get_compose_info(
     conf,
     compose_type="production",
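For orientation, a minimal sketch of how the new helpers combine, assuming a config with `cts_url` and an OIDC token endpoint (the URLs below are placeholders, not values from this repo):

    conf = {
        "cts_url": "https://cts.example.com",
        "cts_oidc_token_url": "https://sso.example.com/token",
    }
    # cts_auth() yields a requests auth object (Kerberos, Bearer, or None);
    # retry_request() retries on RequestException and raises RuntimeError on
    # fatal 4XX responses.
    with cts_auth(conf) as auth:
        rv = retry_request("get", conf["cts_url"] + "/api/1/composes/", auth=auth)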
@@ -94,38 +183,19 @@ def get_compose_info(
     ci.compose.type = compose_type
     ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
     ci.compose.respin = compose_respin or 0
+    ci.compose.id = ci.create_compose_id()

-    cts_url = conf.get("cts_url", None)
+    cts_url = conf.get("cts_url")
     if cts_url:
-        # Requests-kerberos cannot accept custom keytab, we need to use
-        # environment variable for this. But we need to change environment
-        # only temporarily just for this single requests.post.
-        # So at first backup the current environment and revert to it
-        # after the requests.post call.
-        cts_keytab = conf.get("cts_keytab", None)
-        authentication = get_authentication(conf)
-        if cts_keytab:
-            environ_copy = dict(os.environ)
-            if "$HOSTNAME" in cts_keytab:
-                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
-            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
-            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
-
-        try:
-            # Create compose in CTS and get the reserved compose ID.
-            ci.compose.id = ci.create_compose_id()
-            url = os.path.join(cts_url, "api/1/composes/")
-            data = {
-                "compose_info": json.loads(ci.dumps()),
-                "parent_compose_ids": parent_compose_ids,
-                "respin_of": respin_of,
-            }
-            rv = retry_request("post", url, data=data, auth=authentication)
-        finally:
-            if cts_keytab:
-                shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
-                os.environ.clear()
-                os.environ.update(environ_copy)
+        # Create compose in CTS and get the reserved compose ID.
+        url = os.path.join(cts_url, "api/1/composes/")
+        data = {
+            "compose_info": json.loads(ci.dumps()),
+            "parent_compose_ids": parent_compose_ids,
+            "respin_of": respin_of,
+        }
+        with cts_auth(conf) as authentication:
+            rv = retry_request("post", url, json_data=data, auth=authentication)

         # Update local ComposeInfo with received ComposeInfo.
         cts_ci = ComposeInfo()
@@ -133,22 +203,9 @@ def get_compose_info(
         ci.compose.respin = cts_ci.compose.respin
         ci.compose.id = cts_ci.compose.id

-    else:
-        ci.compose.id = ci.create_compose_id()
-
     return ci


-def get_authentication(conf):
-    authentication = None
-    cts_keytab = conf.get("cts_keytab", None)
-    if cts_keytab:
-        from requests_kerberos import HTTPKerberosAuth
-
-        authentication = HTTPKerberosAuth()
-    return authentication
-
-
 def write_compose_info(compose_dir, ci):
     """
     Write ComposeInfo `ci` to `compose_dir` subdirectories.
@@ -162,17 +219,20 @@ def write_compose_info(compose_dir, ci):


 def update_compose_url(compose_id, compose_dir, conf):
-    authentication = get_authentication(conf)
     cts_url = conf.get("cts_url", None)
     if cts_url:
         url = os.path.join(cts_url, "api/1/composes", compose_id)
         tp = conf.get("translate_paths", None)
         compose_url = translate_path_raw(tp, compose_dir)
+        if compose_url == compose_dir:
+            # We do not have a URL, do not attempt the update.
+            return
         data = {
             "action": "set_url",
             "compose_url": compose_url,
         }
-        return retry_request("patch", url, data=data, auth=authentication)
+        with cts_auth(conf) as authentication:
+            return retry_request("patch", url, json_data=data, auth=authentication)


 def get_compose_dir(
@@ -183,11 +243,19 @@ def get_compose_dir(
     compose_respin=None,
     compose_label=None,
     already_exists_callbacks=None,
+    parent_compose_ids=None,
+    respin_of=None,
 ):
     already_exists_callbacks = already_exists_callbacks or []

     ci = get_compose_info(
-        conf, compose_type, compose_date, compose_respin, compose_label
+        conf,
+        compose_type,
+        compose_date,
+        compose_respin,
+        compose_label,
+        parent_compose_ids,
+        respin_of,
     )

     cts_url = conf.get("cts_url", None)
@@ -342,6 +410,8 @@ class Compose(kobo.log.LoggingBase):
         else:
             self.cache_region = make_region().configure("dogpile.cache.null")

+        self.koji_downloader = KojiDownloadProxy.from_config(self.conf, self._logger)
+
     get_compose_info = staticmethod(get_compose_info)
     write_compose_info = staticmethod(write_compose_info)
     get_compose_dir = staticmethod(get_compose_dir)
@@ -637,7 +707,7 @@ class Compose(kobo.log.LoggingBase):
             separators=(",", ": "),
         )

-    def traceback(self, detail=None):
+    def traceback(self, detail=None, show_locals=True):
         """Store an extended traceback. This method should only be called when
         handling an exception.
@@ -649,7 +719,7 @@ class Compose(kobo.log.LoggingBase):
         tb_path = self.paths.log.log_file("global", basename)
         self.log_error("Extended traceback in: %s", tb_path)
         with open(tb_path, "wb") as f:
-            f.write(kobo.tback.Traceback().get_traceback())
+            f.write(kobo.tback.Traceback(show_locals=show_locals).get_traceback())

     def load_old_compose_config(self):
         """
@@ -5,11 +5,14 @@ from __future__ import print_function
 import os
 import six
 from collections import namedtuple
+from kobo.shortcuts import run
 from six.moves import shlex_quote

 from .wrappers import iso
 from .wrappers.jigdo import JigdoWrapper

+from .phases.buildinstall import BOOT_CONFIGS, BOOT_IMAGES
+

 CreateIsoOpts = namedtuple(
     "CreateIsoOpts",
@@ -64,10 +67,6 @@ def make_image(f, opts):
             os.path.join("$TEMPLATE", "config_files/ppc"),
             hfs_compat=opts.hfs_compat,
         )
-    elif opts.buildinstall_method == "buildinstall":
-        mkisofs_kwargs["boot_args"] = iso.get_boot_options(
-            opts.arch, "/usr/lib/anaconda-runtime/boot"
-        )

     # ppc(64) doesn't seem to support utf-8
     if opts.arch in ("ppc", "ppc64", "ppc64le"):
@@ -118,23 +117,73 @@ def make_jigdo(f, opts):
     emit(f, cmd)


+def _get_perms(fs_path):
+    """Compute proper permissions for a file.
+
+    This mimics what the -rational-rock option of genisoimage does. All read
+    bits are set, so that files and directories are globally readable. If any
+    execute bit is set for a file, set them all. No writes are allowed and
+    special bits are erased too.
+    """
+    statinfo = os.stat(fs_path)
+    perms = 0o444
+    if statinfo.st_mode & 0o111:
+        perms |= 0o111
+    return perms
+
+
 def write_xorriso_commands(opts):
+    # Create manifest for the boot.iso listing all contents
+    boot_iso_manifest = "%s.manifest" % os.path.join(
+        opts.script_dir, os.path.basename(opts.boot_iso)
+    )
+    run(
+        iso.get_manifest_cmd(
+            opts.boot_iso, opts.use_xorrisofs, output_file=boot_iso_manifest
+        )
+    )
+    # Find which files may have been updated by pungi. This only includes a few
+    # files from tweaking buildinstall and .discinfo metadata. There's no good
+    # way to detect whether the boot config files actually changed, so we may
+    # be updating files in the ISO with the same data.
+    UPDATEABLE_FILES = set(BOOT_IMAGES + BOOT_CONFIGS + [".discinfo"])
+    updated_files = set()
+    excluded_files = set()
+    with open(boot_iso_manifest) as f:
+        for line in f:
+            path = line.lstrip("/").rstrip("\n")
+            if path in UPDATEABLE_FILES:
+                updated_files.add(path)
+            else:
+                excluded_files.add(path)
+
     script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
     with open(script, "w") as f:
         emit(f, "-indev %s" % opts.boot_iso)
         emit(f, "-outdev %s" % os.path.join(opts.output_dir, opts.iso_name))
         emit(f, "-boot_image any replay")
         emit(f, "-volid %s" % opts.volid)
+        # isoinfo -J uses the Joliet tree, and it's used by virt-install
+        emit(f, "-joliet on")
+        # Support long filenames in the Joliet trees. Repodata is particularly
+        # likely to run into this limit.
+        emit(f, "-compliance joliet_long_names")

         with open(opts.graft_points) as gp:
             for line in gp:
                 iso_path, fs_path = line.strip().split("=", 1)
-                emit(f, "-map %s %s" % (fs_path, iso_path))
+                if iso_path in excluded_files:
+                    continue
+                cmd = "-update" if iso_path in updated_files else "-map"
+                emit(f, "%s %s %s" % (cmd, fs_path, iso_path))
+                emit(f, "-chmod 0%o %s" % (_get_perms(fs_path), iso_path))

         if opts.arch == "ppc64le":
             # This is needed for the image to be bootable.
             emit(f, "-as mkisofs -U --")

+        emit(f, "-chown_r 0 /")
+        emit(f, "-chgrp_r 0 /")
         emit(f, "-end")
     return script
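As a quick illustration, _get_perms() collapses any on-disk mode into one of two results (the example modes are hypothetical):

    # 0o644 (rw-r--r--) -> 0o444: no execute bit set, so read bits only.
    # 0o750 (rwxr-x---) -> 0o555: at least one execute bit, so all are set.
    # Write bits and special bits (setuid/setgid/sticky) never survive.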
@@ -1118,7 +1118,6 @@ class Pungi(PungiBase):
         self.logger.info("Finished gathering package objects.")

     def gather(self):
-
         # get package objects according to the input list
         self.getPackageObjects()
         if self.is_sources:
@@ -15,17 +15,21 @@


 from enum import Enum
-from itertools import count
+from functools import cmp_to_key
+from itertools import count, groupby
+import errno
 import logging
 import os
 import re

 from kobo.rpmlib import parse_nvra
+import rpm

 import pungi.common
 import pungi.dnf_wrapper
 import pungi.multilib_dnf
 import pungi.util
+from pungi import arch_utils
 from pungi.linker import Linker
 from pungi.profiler import Profiler
 from pungi.util import DEBUG_PATTERNS
@@ -245,12 +249,36 @@ class Gather(GatherBase):
         # from lookaside. This can be achieved by removing any package that is
         # also in lookaside from the list.
         lookaside_pkgs = set()
-        for pkg in package_list:
-            if pkg.repoid in self.opts.lookaside_repos:
-                lookaside_pkgs.add("{0.name}-{0.evr}".format(pkg))

-        if self.opts.greedy_method == "all":
-            return list(package_list)
+        if self.opts.lookaside_repos:
+            # We will call `latest()` to get the highest version packages only.
+            # However, that is per name and architecture. If a package switches
+            # from arched to noarch or the other way, it is possible that the
+            # package_list contains different versions in main repos and in
+            # lookaside repos.
+            # We need to manually filter the latest version.
+            def vercmp(x, y):
+                return rpm.labelCompare(x[1], y[1])
+
+            # Annotate the packages with their version.
+            versioned_packages = [
+                (pkg, (str(pkg.epoch) or "0", pkg.version, pkg.release))
+                for pkg in package_list
+            ]
+            # Sort the packages newest first.
+            sorted_packages = sorted(
+                versioned_packages, key=cmp_to_key(vercmp), reverse=True
+            )
+            # Group packages by version, take the first group and discard the
+            # version info from the tuple.
+            package_list = list(
+                x[0] for x in next(groupby(sorted_packages, key=lambda x: x[1]))[1]
+            )
+
+            # Now we can decide what is used from lookaside.
+            for pkg in package_list:
+                if pkg.repoid in self.opts.lookaside_repos:
+                    lookaside_pkgs.add("{0.name}-{0.evr}".format(pkg))

         all_pkgs = []
         for pkg in package_list:
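A standalone sketch of the keep-only-the-newest-EVR step above, with plain tuples standing in for real package objects (assumes the rpm Python bindings are available):

    from functools import cmp_to_key
    from itertools import groupby
    import rpm

    # Each entry: (label, (epoch, version, release)) with an all-string EVR.
    candidates = [
        ("foo-1.0-1.x86_64", ("0", "1.0", "1")),
        ("foo-1.2-1.noarch", ("0", "1.2", "1")),
        ("foo-1.2-1.x86_64", ("0", "1.2", "1")),
    ]
    newest_first = sorted(
        candidates,
        key=cmp_to_key(lambda a, b: rpm.labelCompare(a[1], b[1])),
        reverse=True,
    )
    # The first group holds every entry sharing the highest EVR.
    latest = [name for name, _ in next(groupby(newest_first, key=lambda x: x[1]))[1]]
    # latest == ["foo-1.2-1.noarch", "foo-1.2-1.x86_64"]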
@@ -263,16 +291,21 @@ class Gather(GatherBase):

         if not debuginfo:
             native_pkgs = set(
-                self.q_native_binary_packages.filter(pkg=all_pkgs).apply()
+                self.q_native_binary_packages.filter(pkg=all_pkgs).latest().apply()
             )
             multilib_pkgs = set(
-                self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply()
+                self.q_multilib_binary_packages.filter(pkg=all_pkgs).latest().apply()
             )
         else:
-            native_pkgs = set(self.q_native_debug_packages.filter(pkg=all_pkgs).apply())
+            native_pkgs = set(
+                self.q_native_debug_packages.filter(pkg=all_pkgs).latest().apply()
+            )
             multilib_pkgs = set(
-                self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply()
+                self.q_multilib_debug_packages.filter(pkg=all_pkgs).latest().apply()
             )

+        if self.opts.greedy_method == "all":
+            return list(native_pkgs | multilib_pkgs)
+
         result = set()
@@ -392,9 +425,7 @@ class Gather(GatherBase):
         """Given a name of a queue (stored as attribute in `self`), exclude
         all given packages and keep only the latest per package name and arch.
         """
-        setattr(
-            self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply()
-        )
+        setattr(self, queue, getattr(self, queue).filter(pkg__neq=exclude).apply())

     @Profiler("Gather._apply_excludes()")
     def _apply_excludes(self, excludes):
@@ -500,12 +531,21 @@ class Gather(GatherBase):
                     name__glob=pattern[:-2]
                 ).apply()
             else:
-                pkgs = self.q_binary_packages.filter(
-                    name__glob=pattern
-                ).apply()
+                kwargs = {"name__glob": pattern}
+                if "." in pattern:
+                    # The pattern could be name.arch. Check if the
+                    # arch is valid, and if yes, make a more
+                    # specific query.
+                    name, arch = pattern.split(".", 1)
+                    if arch in arch_utils.arches:
+                        kwargs["name__glob"] = name
+                        kwargs["arch__eq"] = arch
+                pkgs = self.q_binary_packages.filter(**kwargs).apply()

         if not pkgs:
-            self.logger.error("No package matches pattern %s" % pattern)
+            self.logger.error(
+                "Could not find a match for %s in any configured repo", pattern
+            )

         # The pattern could have been a glob. In that case we want to
         # group the packages by name and get best match in those
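The new branch effectively narrows a name.arch pattern into a per-arch query; a small sketch with a stand-in arch list:

    arches = {"x86_64", "i686", "noarch"}  # stand-in for arch_utils.arches
    pattern = "glibc.x86_64"
    kwargs = {"name__glob": pattern}
    if "." in pattern:
        name, arch = pattern.split(".", 1)
        if arch in arches:
            kwargs = {"name__glob": name, "arch__eq": arch}
    # kwargs == {"name__glob": "glibc", "arch__eq": "x86_64"}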
@@ -616,7 +656,6 @@ class Gather(GatherBase):
         return added

         for pkg in self.result_debug_packages.copy():
-
             if pkg not in self.finished_add_debug_package_deps:
                 deps = self._get_package_deps(pkg, debuginfo=True)
                 for i, req in deps:
@@ -784,7 +823,6 @@ class Gather(GatherBase):
                 continue

             debug_pkgs = []
-            pkg_in_lookaside = pkg.repoid in self.opts.lookaside_repos
             for i in candidates:
                 if pkg.arch != i.arch:
                     continue
@@ -792,7 +830,7 @@ class Gather(GatherBase):
                     # If it's not debugsource package or does not match name of
                     # the package, we don't want it in.
                     continue
-                if i.repoid in self.opts.lookaside_repos or pkg_in_lookaside:
+                if self.is_from_lookaside(i):
                     self._set_flag(i, PkgFlag.lookaside)
                 if i not in self.result_debug_packages:
                     added.add(i)
@@ -1030,9 +1068,12 @@ class Gather(GatherBase):
         # Link downloaded package in (or link package from file repo)
         try:
             linker.link(pkg.localPkg(), target)
-        except Exception:
-            self.logger.error("Unable to link %s from the yum cache." % pkg.name)
-            raise
+        except Exception as ex:
+            if ex.errno == errno.EEXIST:
+                self.logger.warning("Downloaded package exists in %s", target)
+            else:
+                self.logger.error("Unable to link %s from the yum cache.", pkg.name)
+                raise

     def log_count(self, msg, method, *args):
         """
@@ -306,11 +306,6 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
     if variant.type in ("addon",) or variant.is_empty:
         return

-    compose.log_debug(
-        "on arch '%s' looking at variant '%s' of type '%s'"
-        % (arch, variant, variant.type)
-    )
-
     if not timestamp:
         timestamp = int(time.time())
     else:
@@ -219,10 +219,6 @@ class BuildinstallPhase(PhaseBase):
         return repos

     def run(self):
-        lorax = LoraxWrapper()
-        product = self.compose.conf["release_name"]
-        version = self.compose.conf["release_version"]
-        release = self.compose.conf["release_version"]
         disc_type = self.compose.conf["disc_types"].get("dvd", "dvd")

         # Prepare kickstart file for final images.
@@ -275,29 +271,12 @@ class BuildinstallPhase(PhaseBase):
                         ),
                     )
                 )
-            elif self.buildinstall_method == "buildinstall":
-                volid = get_volid(self.compose, arch, disc_type=disc_type)
-                commands.append(
-                    (
-                        None,
-                        lorax.get_buildinstall_cmd(
-                            product,
-                            version,
-                            release,
-                            repo_baseurls,
-                            output_dir,
-                            is_final=self.compose.supported,
-                            buildarch=arch,
-                            volid=volid,
-                        ),
-                    )
-                )
             else:
                 raise ValueError(
                     "Unsupported buildinstall method: %s" % self.buildinstall_method
                 )

-        for (variant, cmd) in commands:
+        for variant, cmd in commands:
             self.pool.add(BuildinstallThread(self.pool))
             self.pool.queue_put(
                 (self.compose, arch, variant, cmd, self.pkgset_phase)
@@ -364,9 +343,17 @@ BOOT_CONFIGS = [
     "EFI/BOOT/BOOTX64.conf",
     "EFI/BOOT/grub.cfg",
 ]

+BOOT_IMAGES = [
+    "images/efiboot.img",
+]
+

 def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
+    """
+    Put escaped volume ID and possibly kickstart file into the boot
+    configuration files.
+    :returns: list of paths to modified config files
+    """
     volid_escaped = volid.replace(" ", r"\x20").replace("\\", "\\\\")
     volid_escaped_2 = volid_escaped.replace("\\", "\\\\")
     found_configs = []
@@ -374,7 +361,6 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
         config_path = os.path.join(path, config)
         if not os.path.exists(config_path):
             continue
-        found_configs.append(config)

         with open(config_path, "r") as f:
             data = original_data = f.read()
@@ -394,8 +380,13 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
         with open(config_path, "w") as f:
             f.write(data)

-        if logger and data != original_data:
-            logger.info("Boot config %s changed" % config_path)
+        if data != original_data:
+            found_configs.append(config)
+            if logger:
+                # Generally lorax should create the file with the correct
+                # volume id already. If we don't have a kickstart, this
+                # function should be a no-op.
+                logger.info("Boot config %s changed" % config_path)

     return found_configs
@@ -434,31 +425,32 @@ def tweak_buildinstall(
     if kickstart_file and found_configs:
         shutil.copy2(kickstart_file, os.path.join(dst, "ks.cfg"))

-    images = [
-        os.path.join(tmp_dir, "images", "efiboot.img"),
-    ]
-    for image in images:
-        if not os.path.isfile(image):
-            continue
+    images = [os.path.join(tmp_dir, img) for img in BOOT_IMAGES]
+    if found_configs:
+        for image in images:
+            if not os.path.isfile(image):
+                continue

             with iso.mount(
                 image,
                 logger=compose._logger,
                 use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
             ) as mount_tmp_dir:
-                for config in BOOT_CONFIGS:
+                for config in found_configs:
+                    # Put each modified config file into the image (overwriting
+                    # the original).
                     config_path = os.path.join(tmp_dir, config)
                     config_in_image = os.path.join(mount_tmp_dir, config)

                     if os.path.isfile(config_in_image):
                         cmd = [
                             "cp",
                             "-v",
                             "--remove-destination",
                             config_path,
                             config_in_image,
                         ]
                         run(cmd)

     # HACK: make buildinstall files world readable
     run("chmod -R a+rX %s" % shlex_quote(tmp_dir))
@@ -814,8 +806,6 @@ class BuildinstallThread(WorkerThread):
         if buildinstall_method == "lorax":
             packages += ["lorax"]
             chown_paths.append(_get_log_dir(compose, variant, arch))
-        elif buildinstall_method == "buildinstall":
-            packages += ["anaconda"]
         packages += get_arch_variant_data(
             compose.conf, "buildinstall_packages", arch, variant
         )
@@ -154,6 +154,13 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
             disc_num=cmd["disc_num"],
             disc_count=cmd["disc_count"],
         )
+        if self.compose.notifier:
+            self.compose.notifier.send(
+                "createiso-imagedone",
+                file=cmd["iso_path"],
+                arch=arch,
+                variant=str(variant),
+            )

     def try_reuse(self, cmd, variant, arch, opts):
         """Try to reuse image from previous compose.
@@ -369,7 +376,7 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
         if self.compose.notifier:
             self.compose.notifier.send("createiso-targets", deliverables=deliverables)

-        for (cmd, variant, arch) in commands:
+        for cmd, variant, arch in commands:
             self.pool.add(CreateIsoThread(self.pool))
             self.pool.queue_put((self.compose, cmd, variant, arch))
@@ -539,7 +546,6 @@ def run_createiso_command(num, compose, bootable, arch, cmd, mounts, log_file):
     if bootable:
         extra_packages = {
             "lorax": ["lorax", "which"],
-            "buildinstall": ["anaconda"],
         }
         packages.extend(extra_packages[compose.conf["buildinstall_method"]])
@@ -76,7 +76,7 @@ class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
         for arch in sorted(arches):
             commands.append((config, variant, arch))

-        for (config, variant, arch) in commands:
+        for config, variant, arch in commands:
             self.pool.add(ExtraIsosThread(self.pool, self.bi))
             self.pool.queue_put((self.compose, config, variant, arch))
@@ -90,7 +90,7 @@ class GatherPhase(PhaseBase):

     # check whether variants from configuration value
     # 'variant_as_lookaside' are correct
-    for (requiring, required) in variant_as_lookaside:
+    for requiring, required in variant_as_lookaside:
         if requiring in all_variants and required not in all_variants:
             errors.append(
                 "variant_as_lookaside: variant %r doesn't exist but is "
@@ -99,7 +99,7 @@ class GatherPhase(PhaseBase):

     # check whether variants from configuration value
     # 'variant_as_lookaside' have same architectures
-    for (requiring, required) in variant_as_lookaside:
+    for requiring, required in variant_as_lookaside:
         if (
             requiring in all_variants
             and required in all_variants
@@ -235,7 +235,7 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets, methods):
     if not hasattr(compose, "_gather_reused_variant_arch"):
         setattr(compose, "_gather_reused_variant_arch", [])
     variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
-    for (requiring, required) in variant_as_lookaside:
+    for requiring, required in variant_as_lookaside:
         if (
             requiring == variant.uid
             and (required, arch) not in compose._gather_reused_variant_arch
@@ -468,9 +468,7 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None):
         )

     else:
-
         for source_name in ("module", "comps", "json"):
-
             packages, groups, filter_packages = get_variant_packages(
                 compose, arch, variant, source_name, package_sets
             )
@@ -575,7 +573,6 @@ def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs=None):
     move_to_parent_pkgs = _mk_pkg_map()
     removed_pkgs = _mk_pkg_map()
     for pkg_type, pkgs in pkg_map.items():
-
         new_pkgs = []
         for pkg in pkgs:
             pkg_path = pkg["path"]
@@ -647,9 +644,10 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
             compose.paths.work.topdir(arch="global"), "download"
         )
         + "/",
-        "koji": lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
-            compose
-        ).koji_module.config.topdir.rstrip("/")
+        "koji": lambda: compose.conf.get(
+            "koji_cache",
+            pungi.wrappers.kojiwrapper.KojiWrapper(compose).koji_module.config.topdir,
+        ).rstrip("/")
         + "/",
     }
     path_prefix = prefixes[compose.conf["pkgset_source"]]()
|
@ -47,9 +47,15 @@ class FakePackage(object):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def files(self):
|
def files(self):
|
||||||
return [
|
paths = []
|
||||||
os.path.join(dirname, basename) for (_, dirname, basename) in self.pkg.files
|
# createrepo_c.Package.files is a tuple, but its length differs across
|
||||||
]
|
# versions. The constants define index at which the related value is
|
||||||
|
# located.
|
||||||
|
for entry in self.pkg.files:
|
||||||
|
paths.append(
|
||||||
|
os.path.join(entry[cr.FILE_ENTRY_PATH], entry[cr.FILE_ENTRY_NAME])
|
||||||
|
)
|
||||||
|
return paths
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def provides(self):
|
def provides(self):
|
||||||
|
@@ -25,6 +25,7 @@ from productmd.rpms import Rpms
 # results will be pulled into the compose.
 EXTENSIONS = {
     "docker": ["tar.gz", "tar.xz"],
+    "iso": ["iso"],
     "liveimg-squashfs": ["liveimg.squashfs"],
     "qcow": ["qcow"],
     "qcow2": ["qcow2"],
@@ -39,6 +40,7 @@ EXTENSIONS = {
     "vdi": ["vdi"],
     "vmdk": ["vmdk"],
     "vpc": ["vhd"],
+    "vhd-compressed": ["vhd.gz", "vhd.xz"],
     "vsphere-ova": ["vsphere.ova"],
 }
@@ -344,7 +346,9 @@ class CreateImageBuildThread(WorkerThread):
         # let's not change filename of koji outputs
         image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

-        src_file = os.path.realpath(image_info["path"])
+        src_file = compose.koji_downloader.get_file(
+            os.path.realpath(image_info["path"])
+        )
         linker.link(src_file, image_dest, link_type=cmd["link_type"])

         # Update image manifest
@@ -117,7 +117,7 @@ class LiveImagesPhase(

         commands.append((cmd, variant, arch))

-        for (cmd, variant, arch) in commands:
+        for cmd, variant, arch in commands:
             self.pool.add(CreateLiveImageThread(self.pool))
             self.pool.queue_put((self.compose, cmd, variant, arch))
@@ -232,7 +232,7 @@ class CreateLiveImageThread(WorkerThread):
                 "Got %d images from task %d, expected 1."
                 % (len(image_path), output["task_id"])
             )
-        image_path = image_path[0]
+        image_path = compose.koji_downloader.get_file(image_path[0])
         filename = cmd.get("filename") or os.path.basename(image_path)
         destination = os.path.join(cmd["dest_dir"], filename)
         shutil.copy2(image_path, destination)
@@ -182,7 +182,9 @@ class LiveMediaThread(WorkerThread):
         # let's not change filename of koji outputs
         image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))

-        src_file = os.path.realpath(image_info["path"])
+        src_file = compose.koji_downloader.get_file(
+            os.path.realpath(image_info["path"])
+        )
         linker.link(src_file, image_dest, link_type=link_type)

         # Update image manifest
@@ -27,6 +27,35 @@ class OSBuildPhase(
         arches = set(image_conf["arches"]) & arches
         return sorted(arches)

+    @staticmethod
+    def _get_repo_urls(compose, repos, arch="$basearch"):
+        """
+        Get list of repos with resolved repo URLs. Preserve repos defined
+        as dicts.
+        """
+        resolved_repos = []
+
+        for repo in repos:
+            if isinstance(repo, dict):
+                try:
+                    url = repo["baseurl"]
+                except KeyError:
+                    raise RuntimeError(
+                        "`baseurl` is required in repo dict %s" % str(repo)
+                    )
+                url = util.get_repo_url(compose, url, arch=arch)
+                if url is None:
+                    raise RuntimeError("Failed to resolve repo URL for %s" % str(repo))
+                repo["baseurl"] = url
+                resolved_repos.append(repo)
+            else:
+                repo = util.get_repo_url(compose, repo, arch=arch)
+                if repo is None:
+                    raise RuntimeError("Failed to resolve repo URL for %s" % repo)
+                resolved_repos.append(repo)
+
+        return resolved_repos
+
     def _get_repo(self, image_conf, variant):
         """
         Get a list of repos. First included are those explicitly listed in
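A sketch of the two input shapes the new helper accepts (URLs are placeholders): plain strings are resolved to URLs, while dicts keep their extra keys such as package_sets and only have baseurl rewritten:

    repos = [
        "http://example.com/compose/$basearch/os/",        # resolved to a plain URL
        {"baseurl": "Server", "package_sets": ["build"]},  # dict preserved, baseurl resolved
    ]
    resolved = OSBuildPhase._get_repo_urls(compose, repos, arch="$arch")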
@@ -38,7 +67,7 @@ class OSBuildPhase(
         if not variant.is_empty and variant.uid not in repos:
             repos.append(variant.uid)

-        return util.get_repo_urls(self.compose, repos, arch="$arch")
+        return OSBuildPhase._get_repo_urls(self.compose, repos, arch="$arch")

     def run(self):
         for variant in self.compose.get_variants():
@@ -183,16 +212,27 @@ class RunOSBuildThread(WorkerThread):
         # image_dir is absolute path to which the image should be copied.
         # We also need the same path as relative to compose directory for
         # including in the metadata.
-        image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
-        rel_image_dir = compose.paths.compose.image_dir(variant, relative=True) % {
-            "arch": arch
-        }
+        if archive["type_name"] == "iso":
+            # If the produced image is actually an ISO, it should go to
+            # iso/ subdirectory.
+            image_dir = compose.paths.compose.iso_dir(arch, variant)
+            rel_image_dir = compose.paths.compose.iso_dir(
+                arch, variant, relative=True
+            )
+        else:
+            image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
+            rel_image_dir = compose.paths.compose.image_dir(
+                variant, relative=True
+            ) % {"arch": arch}
         util.makedirs(image_dir)

         image_dest = os.path.join(image_dir, archive["filename"])

-        src_file = os.path.join(
-            koji.koji_module.pathinfo.imagebuild(build_info), archive["filename"]
+        src_file = compose.koji_downloader.get_file(
+            os.path.join(
+                koji.koji_module.pathinfo.imagebuild(build_info),
+                archive["filename"],
+            ),
         )

         linker.link(src_file, image_dest, link_type=compose.conf["link_type"])
|
|||||||
|
|
||||||
# Update image manifest
|
# Update image manifest
|
||||||
img = Image(compose.im)
|
img = Image(compose.im)
|
||||||
img.type = archive["type_name"]
|
|
||||||
|
# Get the manifest type from the config if supplied, otherwise we
|
||||||
|
# determine the manifest type based on the koji output
|
||||||
|
img.type = config.get("manifest_type")
|
||||||
|
if not img.type:
|
||||||
|
if archive["type_name"] != "iso":
|
||||||
|
img.type = archive["type_name"]
|
||||||
|
else:
|
||||||
|
fn = archive["filename"].lower()
|
||||||
|
if "ostree" in fn:
|
||||||
|
img.type = "dvd-ostree-osbuild"
|
||||||
|
elif "live" in fn:
|
||||||
|
img.type = "live-osbuild"
|
||||||
|
elif "netinst" in fn or "boot" in fn:
|
||||||
|
img.type = "boot"
|
||||||
|
else:
|
||||||
|
img.type = "dvd"
|
||||||
|
|
||||||
img.format = suffix
|
img.format = suffix
|
||||||
img.path = os.path.join(rel_image_dir, archive["filename"])
|
img.path = os.path.join(rel_image_dir, archive["filename"])
|
||||||
img.mtime = util.get_mtime(image_dest)
|
img.mtime = util.get_mtime(image_dest)
|
||||||
|
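The ISO fallback in the manifest-type logic above reduces to substring checks on the lowercased filename; roughly (filenames hypothetical):

    # "...-ostree-x86_64.iso"  -> "dvd-ostree-osbuild"
    # "...-Live-x86_64.iso"    -> "live-osbuild"
    # "...-netinst-x86_64.iso" -> "boot"
    # anything else            -> "dvd"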
@@ -168,7 +168,9 @@ class OSTreeThread(WorkerThread):
                 ("unified-core", config.get("unified_core", False)),
             ]
         )
-        packages = ["pungi", "ostree", "rpm-ostree"]
+        default_packages = ["pungi", "ostree", "rpm-ostree"]
+        additional_packages = config.get("runroot_packages", [])
+        packages = default_packages + additional_packages
         log_file = os.path.join(self.logdir, "runroot.log")
         mounts = [compose.topdir, config["ostree_repo"]]
         runroot = Runroot(compose, phase="ostree")
@@ -38,12 +38,17 @@ from pungi.phases.createrepo import add_modular_metadata

 def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
     result = {}
-    exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
     for arch in compose.get_arches():
         compose.log_info("Populating package set for arch: %s", arch)
         is_multilib = is_arch_multilib(compose.conf, arch)
         arches = get_valid_arches(arch, is_multilib, add_src=True)
-        pkgset = global_pkgset.subset(arch, arches, exclusive_noarch=exclusive_noarch)
+        pkgset = global_pkgset.subset(
+            arch,
+            arches,
+            exclusive_noarch=compose.conf["pkgset_exclusive_arch_considers_noarch"],
+            inherit_to_noarch=compose.conf["pkgset_inherit_exclusive_arch_to_noarch"],
+        )
         pkgset.save_file_list(
             compose.paths.work.package_list(arch=arch, pkgset=global_pkgset),
             remove_path_prefix=path_prefix,
@@ -24,10 +24,12 @@ import json
 import os
 import time
 from six.moves import cPickle as pickle
+from functools import partial

 import kobo.log
 import kobo.pkgset
 import kobo.rpmlib
+from kobo.shortcuts import compute_file_checksums

 from kobo.threads import WorkerThread, ThreadPool
@@ -203,16 +205,31 @@ class PackageSetBase(kobo.log.LoggingBase):

         return self.rpms_by_arch

-    def subset(self, primary_arch, arch_list, exclusive_noarch=True):
+    def subset(
+        self, primary_arch, arch_list, exclusive_noarch=True, inherit_to_noarch=True
+    ):
         """Create a subset of this package set that only includes
         packages compatible with"""
         pkgset = PackageSetBase(
             self.name, self.sigkey_ordering, logger=self._logger, arches=arch_list
         )
-        pkgset.merge(self, primary_arch, arch_list, exclusive_noarch=exclusive_noarch)
+        pkgset.merge(
+            self,
+            primary_arch,
+            arch_list,
+            exclusive_noarch=exclusive_noarch,
+            inherit_to_noarch=inherit_to_noarch,
+        )
         return pkgset

-    def merge(self, other, primary_arch, arch_list, exclusive_noarch=True):
+    def merge(
+        self,
+        other,
+        primary_arch,
+        arch_list,
+        exclusive_noarch=True,
+        inherit_to_noarch=True,
+    ):
         """
         Merge ``other`` package set into this instance.
         """
@@ -251,7 +268,7 @@ class PackageSetBase(kobo.log.LoggingBase):
             if i.file_path in self.file_cache:
                 # TODO: test if it really works
                 continue
-            if exclusivearch_list and arch == "noarch":
+            if inherit_to_noarch and exclusivearch_list and arch == "noarch":
                 if is_excluded(i, exclusivearch_list, logger=self._logger):
                     continue
@@ -318,6 +335,11 @@ class FilelistPackageSet(PackageSetBase):
         return result


+# This is a marker to indicate package set with only extra builds/tasks and no
+# tasks.
+MISSING_KOJI_TAG = object()
+
+
 class KojiPackageSet(PackageSetBase):
     def __init__(
         self,
@@ -334,6 +356,7 @@ class KojiPackageSet(PackageSetBase):
         extra_tasks=None,
         signed_packages_retries=0,
         signed_packages_wait=30,
+        downloader=None,
     ):
         """
         Creates new KojiPackageSet.
@@ -371,7 +394,7 @@ class KojiPackageSet(PackageSetBase):
         :param int signed_packages_wait: How long to wait between search attempts.
         """
         super(KojiPackageSet, self).__init__(
-            name,
+            name if name != MISSING_KOJI_TAG else "no-tag",
             sigkey_ordering=sigkey_ordering,
             arches=arches,
             logger=logger,
@@ -388,6 +411,8 @@ class KojiPackageSet(PackageSetBase):
         self.signed_packages_retries = signed_packages_retries
         self.signed_packages_wait = signed_packages_wait

+        self.downloader = downloader
+
     def __getstate__(self):
         result = self.__dict__.copy()
         del result["koji_wrapper"]
@@ -506,11 +531,28 @@ class KojiPackageSet(PackageSetBase):
         # Check if this RPM is coming from scratch task. In this case, we already
         # know the path.
         if "path_from_task" in rpm_info:
-            return rpm_info["path_from_task"]
+            return self.downloader.get_file(rpm_info["path_from_task"])

         pathinfo = self.koji_wrapper.koji_module.pathinfo
         paths = []

+        if "getRPMChecksums" in self.koji_proxy.system.listMethods():
+
+            def checksum_validator(keyname, pkg_path):
+                checksums = self.koji_proxy.getRPMChecksums(
+                    rpm_info["id"], checksum_types=("sha256",)
+                )
+                if "sha256" in checksums.get(keyname, {}):
+                    computed = compute_file_checksums(pkg_path, ("sha256",))
+                    if computed["sha256"] != checksums[keyname]["sha256"]:
+                        raise RuntimeError("Checksum mismatch for %s" % pkg_path)
+
+        else:
+
+            def checksum_validator(keyname, pkg_path):
+                # Koji doesn't support checksums yet
+                pass
+
         attempts_left = self.signed_packages_retries + 1
         while attempts_left > 0:
             for sigkey in self.sigkey_ordering:
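The validator is later bound to a concrete sigkey with functools.partial, so each download re-checks the sha256 that Koji reports for that signed copy; the call shape is roughly:

    validator = partial(checksum_validator, sigkey)  # fixes keyname; pkg_path supplied later
    path = downloader.get_file(rpm_path, validator)  # downloader: the KojiDownloadProxy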
@@ -523,8 +565,11 @@ class KojiPackageSet(PackageSetBase):
                 )
                 if rpm_path not in paths:
                     paths.append(rpm_path)
-                if os.path.isfile(rpm_path):
-                    return rpm_path
+                path = self.downloader.get_file(
+                    rpm_path, partial(checksum_validator, sigkey)
+                )
+                if path:
+                    return path

             # No signed copy was found, wait a little and try again.
             attempts_left -= 1
@@ -537,16 +582,18 @@ class KojiPackageSet(PackageSetBase):
         # use an unsigned copy (if allowed)
         rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
         paths.append(rpm_path)
-        if os.path.isfile(rpm_path):
-            return rpm_path
+        path = self.downloader.get_file(rpm_path, partial(checksum_validator, ""))
+        if path:
+            return path

         if self._allow_invalid_sigkeys and rpm_info["name"] not in self.packages:
             # use an unsigned copy (if allowed)
             rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
             paths.append(rpm_path)
-            if os.path.isfile(rpm_path):
+            path = self.downloader.get_file(rpm_path)
+            if path:
                 self._invalid_sigkey_rpms.append(rpm_info)
-                return rpm_path
+                return path

         self._invalid_sigkey_rpms.append(rpm_info)
         self.log_error(
@ -567,7 +614,7 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
result_srpms = []
|
result_srpms = []
|
||||||
include_packages = set(include_packages or [])
|
include_packages = set(include_packages or [])
|
||||||
|
|
||||||
if type(event) is dict:
|
if isinstance(event, dict):
|
||||||
event = event["id"]
|
event = event["id"]
|
||||||
|
|
||||||
msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (
|
msg = "Getting latest RPMs (tag: %s, event: %s, inherit: %s)" % (
|
||||||
@ -576,7 +623,9 @@ class KojiPackageSet(PackageSetBase):
|
|||||||
inherit,
|
inherit,
|
||||||
)
|
)
|
||||||
self.log_info("[BEGIN] %s" % msg)
|
self.log_info("[BEGIN] %s" % msg)
|
||||||
rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
|
rpms, builds = [], []
|
||||||
|
if tag != MISSING_KOJI_TAG:
|
||||||
|
rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
|
||||||
extra_rpms, extra_builds = self.get_extra_rpms()
|
extra_rpms, extra_builds = self.get_extra_rpms()
|
||||||
rpms += extra_rpms
|
rpms += extra_rpms
|
||||||
builds += extra_builds
|
builds += extra_builds
|
||||||
@@ -681,6 +730,15 @@ class KojiPackageSet(PackageSetBase):
         :param include_packages: an iterable of tuples (package name, arch) that should
             be included.
         """
+        if len(self.sigkey_ordering) > 1 and (
+            None in self.sigkey_ordering or "" in self.sigkey_ordering
+        ):
+            self.log_warning(
+                "Stop writing reuse file as unsigned packages are allowed "
+                "in the compose."
+            )
+            return
+
         reuse_file = compose.paths.work.pkgset_reuse_file(self.name)
         self.log_info("Writing pkgset reuse file: %s" % reuse_file)
         try:
@@ -697,6 +755,12 @@ class KojiPackageSet(PackageSetBase):
                     "srpms_by_name": self.srpms_by_name,
                     "extra_builds": self.extra_builds,
                     "include_packages": include_packages,
+                    "inherit_to_noarch": compose.conf[
+                        "pkgset_inherit_exclusive_arch_to_noarch"
+                    ],
+                    "exclusive_noarch": compose.conf[
+                        "pkgset_exclusive_arch_considers_noarch"
+                    ],
                 },
                 f,
                 protocol=pickle.HIGHEST_PROTOCOL,
@@ -791,6 +855,8 @@ class KojiPackageSet(PackageSetBase):
             self.log_debug("Failed to load reuse file: %s" % str(e))
             return False
 
+        inherit_to_noarch = compose.conf["pkgset_inherit_exclusive_arch_to_noarch"]
+        exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
         if (
             reuse_data["allow_invalid_sigkeys"] == self._allow_invalid_sigkeys
             and reuse_data["packages"] == self.packages
@@ -798,6 +864,10 @@ class KojiPackageSet(PackageSetBase):
             and reuse_data["extra_builds"] == self.extra_builds
             and reuse_data["sigkeys"] == self.sigkey_ordering
             and reuse_data["include_packages"] == include_packages
+            # If the value is not present in reuse data, the compose was
+            # generated with an older version of Pungi. Best to not reuse.
+            and reuse_data.get("inherit_to_noarch") == inherit_to_noarch
+            and reuse_data.get("exclusive_noarch") == exclusive_noarch
         ):
             self.log_info("Copying repo data for reuse: %s" % old_repo_dir)
             copy_all(old_repo_dir, repo_dir)
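The reuse check is deliberately conservative: a pickle written by an older Pungi lacks the two new keys, so dict.get() returns None, the comparison fails, and the repo data is rebuilt rather than reused. A small illustration of that behaviour:

    # Reuse data written before the new keys existed:
    old_reuse_data = {"packages": ["bash"], "sigkeys": ["ABCD1234"]}
    inherit_to_noarch = True  # current compose configuration

    # .get() on the missing key yields None, so reuse is refused,
    # which is the safe default.
    print(old_reuse_data.get("inherit_to_noarch") == inherit_to_noarch)  # False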
@@ -193,17 +193,13 @@ class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
     def __call__(self):
         compose = self.compose
         self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(compose)
-        # path prefix must contain trailing '/'
-        path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
-        package_sets = get_pkgset_from_koji(
-            self.compose, self.koji_wrapper, path_prefix
-        )
-        return (package_sets, path_prefix)
+        package_sets = get_pkgset_from_koji(self.compose, self.koji_wrapper)
+        return (package_sets, self.compose.koji_downloader.path_prefix)
 
 
-def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
+def get_pkgset_from_koji(compose, koji_wrapper):
     event_info = get_koji_event_info(compose, koji_wrapper)
-    return populate_global_pkgset(compose, koji_wrapper, path_prefix, event_info)
+    return populate_global_pkgset(compose, koji_wrapper, event_info)
 
 
 def _add_module_to_variant(
@@ -232,7 +228,7 @@ def _add_module_to_variant(
             continue
         typedir = koji_wrapper.koji_module.pathinfo.typedir(build, archive["btype"])
         filename = archive["filename"]
-        file_path = os.path.join(typedir, filename)
+        file_path = compose.koji_downloader.get_file(os.path.join(typedir, filename))
         try:
             # If there are two dots, the arch is in the middle. MBS uploads
             # files with actual architecture in the filename, but Pungi deals
@@ -270,9 +266,14 @@ def _add_module_to_variant(
                     "Module %s does not have metadata for arch %s and is not filtered "
                     "out via filter_modules option." % (nsvc, arch)
                 )
-            mod_stream = read_single_module_stream_from_file(
-                mmds[filename], compose, arch, build
-            )
+            try:
+                mod_stream = read_single_module_stream_from_file(
+                    mmds[filename], compose, arch, build
+                )
+            except Exception as exc:
+                # libmodulemd raises various GLib exceptions with not very helpful
+                # messages. Let's replace it with something more useful.
+                raise RuntimeError("Failed to read %s: %s" % (mmds[filename], exc))
             if mod_stream:
                 added = True
                 variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream
@@ -395,7 +396,13 @@ def _is_filtered_out(compose, variant, arch, module_name, module_stream):
 
 
 def _get_modules_from_koji(
-    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd, exclude_module_ns
+    compose,
+    koji_wrapper,
+    event,
+    variant,
+    variant_tags,
+    tag_to_mmd,
+    exclude_module_ns,
 ):
     """
     Loads modules for given `variant` from koji `session`, adds them to
@@ -480,7 +487,16 @@ def filter_inherited(koji_proxy, event, module_builds, top_tag):
         # And keep only builds from that topmost tag
         result.extend(build for build in builds if build["tag_name"] == tag)
 
-    return result
+    # If the same module was inherited multiple times, it will be in result
+    # multiple times. We need to deduplicate.
+    deduplicated_result = []
+    included_nvrs = set()
+    for build in result:
+        if build["nvr"] not in included_nvrs:
+            deduplicated_result.append(build)
+            included_nvrs.add(build["nvr"])
+
+    return deduplicated_result
 
 
 def filter_by_whitelist(compose, module_builds, input_modules, expected_modules):
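The deduplication added to filter_inherited is the standard first-wins seen-set pattern, which keeps the original ordering intact. A self-contained equivalent:

    def deduplicate_builds(builds):
        # Keep the first occurrence of each NVR, preserving order.
        seen = set()
        result = []
        for build in builds:
            if build["nvr"] not in seen:
                result.append(build)
                seen.add(build["nvr"])
        return result

    builds = [{"nvr": "foo-1-1"}, {"nvr": "bar-2-1"}, {"nvr": "foo-1-1"}]
    assert [b["nvr"] for b in deduplicate_builds(builds)] == ["foo-1-1", "bar-2-1"]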
@@ -670,7 +686,7 @@ def _get_modules_from_koji_tags(
         )
 
 
-def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
+def populate_global_pkgset(compose, koji_wrapper, event):
     all_arches = get_all_arches(compose)
 
     # List of compose tags from which we create this compose
@@ -764,7 +780,12 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
 
         if extra_modules:
             _add_extra_modules_to_variant(
-                compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
+                compose,
+                koji_wrapper,
+                variant,
+                extra_modules,
+                variant_tags,
+                tag_to_mmd,
             )
 
         variant_scratch_modules = get_variant_data(
@@ -791,17 +812,23 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
 
     pkgsets = []
 
+    extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
+    extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
+
+    if not pkgset_koji_tags and (extra_builds or extra_tasks):
+        # We have extra packages to pull in, but no tag to merge them with.
+        compose_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)
+        pkgset_koji_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)
+
     # Get package set for each compose tag and merge it to global package
     # list. Also prepare per-variant pkgset, because we do not have list
     # of binary RPMs in module definition - there is just list of SRPMs.
     for compose_tag in compose_tags:
         compose.log_info("Loading package set for tag %s", compose_tag)
+        kwargs = {}
         if compose_tag in pkgset_koji_tags:
-            extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
-            extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
-        else:
-            extra_builds = []
-            extra_tasks = []
+            kwargs["extra_builds"] = extra_builds
+            kwargs["extra_tasks"] = extra_tasks
 
         pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
             compose_tag,
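The placeholder-tag trick keeps the per-tag loop as the single code path even when only pkgset_koji_builds or pkgset_koji_scratch_tasks are configured. A reduced sketch, where the sentinel value stands in for the real pkgsets.MISSING_KOJI_TAG constant:

    MISSING_KOJI_TAG = "__no_tag__"  # stand-in for pkgsets.MISSING_KOJI_TAG

    pkgset_koji_tags = []          # no real tags configured
    extra_builds = ["foo-1.0-1"]   # but extra builds were requested
    compose_tags = list(pkgset_koji_tags)

    if not pkgset_koji_tags and extra_builds:
        # Append the sentinel so the per-tag loop still runs once and the
        # extra builds have a package set to land in.
        compose_tags.append(MISSING_KOJI_TAG)
        pkgset_koji_tags.append(MISSING_KOJI_TAG)

    for tag in compose_tags:
        kwargs = {}
        if tag in pkgset_koji_tags:
            kwargs["extra_builds"] = extra_builds
        print(tag, kwargs)  # __no_tag__ {'extra_builds': ['foo-1.0-1']}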
@@ -813,10 +840,10 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
             allow_invalid_sigkeys=allow_invalid_sigkeys,
             populate_only_packages=populate_only_packages_to_gather,
             cache_region=compose.cache_region,
-            extra_builds=extra_builds,
-            extra_tasks=extra_tasks,
             signed_packages_retries=compose.conf["signed_packages_retries"],
             signed_packages_wait=compose.conf["signed_packages_wait"],
+            downloader=compose.koji_downloader,
+            **kwargs
         )
 
         # Check if we have cache for this tag from previous compose. If so, use
@@ -874,13 +901,18 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
         if pkgset.reuse is None:
             pkgset.populate(
                 compose_tag,
-                event,
+                # We care about packages as they existed on the specified
+                # event. However, modular content tags are not expected to
+                # change, so the event doesn't matter there. If an exact NSVC
+                # of a module is specified, the code above would happily find
+                # its content tag, but fail here if the content tag doesn't
+                # exist at the given event.
+                event=event if is_traditional else None,
                 inherit=should_inherit,
                 include_packages=modular_packages,
             )
         for variant in compose.all_variants.values():
             if compose_tag in variant_tags[variant]:
 
                 # If it's a modular tag, store the package set for the module.
                 for nsvc, koji_tag in variant.module_uid_to_koji_tag.items():
                     if compose_tag == koji_tag:
@@ -903,7 +935,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
                 MaterializedPackageSet.create,
                 compose,
                 pkgset,
-                path_prefix,
+                compose.koji_downloader.path_prefix,
                 mmd=tag_to_mmd.get(pkgset.name),
             )
         )
63  pungi/scripts/cache_cleanup.py  Normal file
@@ -0,0 +1,63 @@
+import argparse
+import os
+import re
+import time
+
+from pungi.util import format_size
+
+
+LOCK_RE = re.compile(r".*\.lock(\|[A-Za-z0-9]+)*$")
+
+
+def should_be_cleaned_up(path, st, threshold):
+    if st.st_nlink == 1 and st.st_mtime < threshold:
+        # No other instances, older than limit
+        return True
+
+    if LOCK_RE.match(path) and st.st_mtime < threshold:
+        # Suspiciously old lock
+        return True
+
+    return False
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("CACHE_DIR")
+    parser.add_argument("-n", "--dry-run", action="store_true")
+    parser.add_argument("--verbose", action="store_true")
+    parser.add_argument(
+        "--max-age",
+        help="age in days; files older than this are considered for deletion",
+        default=7,
+        type=int,
+    )
+
+    args = parser.parse_args()
+
+    topdir = os.path.abspath(args.CACHE_DIR)
+    max_age = args.max_age * 24 * 3600
+
+    cleaned_up = 0
+
+    threshold = time.time() - max_age
+    for dirpath, dirnames, filenames in os.walk(topdir):
+        for f in filenames:
+            filepath = os.path.join(dirpath, f)
+            st = os.stat(filepath)
+            if should_be_cleaned_up(filepath, st, threshold):
+                if args.verbose:
+                    print("RM %s" % filepath)
+                cleaned_up += st.st_size
+                if not args.dry_run:
+                    os.remove(filepath)
+        if not dirnames and not filenames:
+            if args.verbose:
+                print("RMDIR %s" % dirpath)
+            if not args.dry_run:
+                os.rmdir(dirpath)
+
+    if args.dry_run:
+        print("Would reclaim %s." % format_size(cleaned_up))
+    else:
+        print("Reclaimed %s." % format_size(cleaned_up))
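The st_nlink == 1 test works because composes hardlink cached packages into place (see the KojiDownloadProxy notes later in this diff): once no compose holds a link, the cache copy is the last reference left. A quick illustration:

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as d:
        cached = os.path.join(d, "pkg.rpm")
        with open(cached, "w"):
            pass
        print(os.stat(cached).st_nlink)  # 1 -> eligible once old enough

        linked = os.path.join(d, "compose", "pkg.rpm")
        os.makedirs(os.path.dirname(linked))
        os.link(cached, linked)          # a compose takes a hardlink
        print(os.stat(cached).st_nlink)  # 2 -> still referenced, kept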
@@ -171,32 +171,11 @@ def main():
     group.add_argument(
         "--offline", action="store_true", help="Do not resolve git references."
     )
-    parser.add_argument(
-        "--multi",
-        metavar="DIR",
-        help=(
-            "Treat source as config for pungi-orchestrate and store dump into "
-            "given directory."
-        ),
-    )
 
     args = parser.parse_args()
 
     defines = config_utils.extract_defines(args.define)
 
-    if args.multi:
-        if len(args.sources) > 1:
-            parser.error("Only one multi config can be specified.")
-
-        return dump_multi_config(
-            args.sources[0],
-            dest=args.multi,
-            defines=defines,
-            just_dump=args.just_dump,
-            event=args.freeze_event,
-            offline=args.offline,
-        )
-
     return process_file(
         args.sources,
         defines=defines,

@@ -14,6 +14,9 @@ def send(cmd, data):
     topic = "compose.%s" % cmd.replace("-", ".").lower()
     try:
         msg = fedora_messaging.api.Message(topic="pungi.{}".format(topic), body=data)
+        if cmd == "ostree":
+            # https://pagure.io/fedora-infrastructure/issue/10899
+            msg.priority = 3
         fedora_messaging.api.publish(msg)
     except fedora_messaging.exceptions.PublishReturned as e:
         print("Fedora Messaging broker rejected message %s: %s" % (msg.id, e))
@@ -319,7 +319,6 @@ def get_arguments(config):
 
 
 def main():
-
     config = pungi.config.Config()
     opts = get_arguments(config)
 

@@ -23,6 +23,7 @@ from pungi.phases import PHASES_NAMES
 from pungi import get_full_version, util
 from pungi.errors import UnsignedPackagesError
 from pungi.wrappers import kojiwrapper
+from pungi.util import rmtree
 
 
 # force C locales
@@ -300,7 +301,12 @@ def main():
 
     if opts.target_dir:
         compose_dir = Compose.get_compose_dir(
-            opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label
+            opts.target_dir,
+            conf,
+            compose_type=compose_type,
+            compose_label=opts.label,
+            parent_compose_ids=opts.parent_compose_id,
+            respin_of=opts.respin_of,
         )
     else:
         compose_dir = opts.compose_dir
@@ -380,6 +386,14 @@ def run_compose(
     compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
     compose.log_info("COMPOSE_ID=%s" % compose.compose_id)
 
+    installed_pkgs_log = compose.paths.log.log_file("global", "installed-pkgs")
+    compose.log_info("Logging installed packages to %s" % installed_pkgs_log)
+    try:
+        with open(installed_pkgs_log, "w") as f:
+            subprocess.Popen(["rpm", "-qa"], stdout=f)
+    except Exception as e:
+        compose.log_warning("Failed to log installed packages: %s" % str(e))
+
     compose.read_variants()
 
     # dump the config file
@@ -671,7 +685,7 @@ def cli_main():
     except (Exception, KeyboardInterrupt) as ex:
         if COMPOSE:
             COMPOSE.log_error("Compose run failed: %s" % ex)
-            COMPOSE.traceback()
+            COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
             COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
             COMPOSE.write_status("DOOMED")
     else:
@@ -680,3 +694,8 @@ def cli_main():
         sys.stdout.flush()
         sys.stderr.flush()
         sys.exit(1)
+    finally:
+        # Remove repositories cloned during ExtraFiles phase
+        process_id = os.getpid()
+        directory_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
+        rmtree(directory_to_remove)
@@ -279,7 +279,7 @@ class GitUrlResolveError(RuntimeError):
     pass
 
 
-def resolve_git_ref(repourl, ref):
+def resolve_git_ref(repourl, ref, credential_helper=None):
     """Resolve a reference in a Git repo to a commit.
 
     Raises RuntimeError if there was an error. Most likely cause is failure to
@@ -289,7 +289,7 @@ def resolve_git_ref(repourl, ref):
         # This looks like a commit ID already.
         return ref
     try:
-        _, output = git_ls_remote(repourl, ref)
+        _, output = git_ls_remote(repourl, ref, credential_helper)
     except RuntimeError as e:
         raise GitUrlResolveError(
             "ref does not exist in remote repo %s with the error %s %s"
@@ -316,7 +316,7 @@ def resolve_git_ref(repourl, ref):
     return lines[0].split()[0]
 
 
-def resolve_git_url(url):
+def resolve_git_url(url, credential_helper=None):
     """Given a url to a Git repo specifying HEAD or origin/<branch> as a ref,
     replace that specifier with actual SHA1 of the commit.
 
@@ -335,7 +335,7 @@ def resolve_git_url(url):
     scheme = r.scheme.replace("git+", "")
 
     baseurl = urllib.parse.urlunsplit((scheme, r.netloc, r.path, "", ""))
-    fragment = resolve_git_ref(baseurl, ref)
+    fragment = resolve_git_ref(baseurl, ref, credential_helper)
 
     result = urllib.parse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))
     if "?#" in url:
@@ -354,13 +354,18 @@ class GitUrlResolver(object):
         self.offline = offline
         self.cache = {}
 
-    def __call__(self, url, branch=None):
+    def __call__(self, url, branch=None, options=None):
+        credential_helper = options.get("credential_helper") if options else None
         if self.offline:
             return branch or url
         key = (url, branch)
         if key not in self.cache:
             try:
-                res = resolve_git_ref(url, branch) if branch else resolve_git_url(url)
+                res = (
+                    resolve_git_ref(url, branch, credential_helper)
+                    if branch
+                    else resolve_git_url(url, credential_helper)
+                )
                 self.cache[key] = res
             except GitUrlResolveError as exc:
                 self.cache[key] = exc
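Hypothetical usage of the extended resolver; the repo URL and helper path below are illustrative, not part of the diff:

    resolver = GitUrlResolver(offline=False)
    commit = resolver(
        "https://git.example.com/releng.git",
        branch="main",
        options={"credential_helper": "!/usr/local/bin/pungi-git-creds"},
    )
    print(commit)  # the SHA1 that "main" pointed to at resolution time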
@@ -456,6 +461,9 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwargs):
         if not variant_uid and "%(variant)s" in i:
             continue
         try:
+            # fmt: off
+            # Black wants to add a comma after kwargs, but that's not valid in
+            # Python 2.7
             args = get_format_substs(
                 compose,
                 variant=variant_uid,
@@ -467,6 +475,7 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwargs):
                 base_product_version=base_product_version,
                 **kwargs
             )
+            # fmt: on
             volid = (i % args).format(**args)
         except KeyError as err:
             raise RuntimeError(
@@ -991,8 +1000,12 @@ def retry(timeout=120, interval=30, wait_on=Exception):
 
 
 @retry(wait_on=RuntimeError)
-def git_ls_remote(baseurl, ref):
-    return run(["git", "ls-remote", baseurl, ref], universal_newlines=True)
+def git_ls_remote(baseurl, ref, credential_helper=None):
+    cmd = ["git"]
+    if credential_helper:
+        cmd.extend(["-c", "credential.useHttpPath=true"])
+        cmd.extend(["-c", "credential.helper=%s" % credential_helper])
+    return run(cmd + ["ls-remote", baseurl, ref], universal_newlines=True)
 
 
 def get_tz_offset():
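With a helper configured, the assembled command comes out roughly as follows; the helper path and URL are hypothetical:

    credential_helper = "!/usr/local/bin/pungi-git-creds"  # hypothetical
    cmd = ["git"]
    if credential_helper:
        cmd.extend(["-c", "credential.useHttpPath=true"])
        cmd.extend(["-c", "credential.helper=%s" % credential_helper])
    cmd += ["ls-remote", "https://git.example.com/releng.git", "HEAD"]
    print(" ".join(cmd))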
@@ -1137,3 +1150,16 @@ def read_json_file(file_path):
     """A helper function to read a JSON file."""
     with open(file_path) as f:
         return json.load(f)
+
+
+UNITS = ["", "Ki", "Mi", "Gi", "Ti"]
+
+
+def format_size(sz):
+    sz = float(sz)
+    unit = 0
+    while sz > 1024:
+        sz /= 1024
+        unit += 1
+
+    return "%.3g %sB" % (sz, UNITS[unit])
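A few sample values for the new helper, following directly from the loop above:

    assert format_size(512) == "512 B"
    assert format_size(2048) == "2 KiB"
    assert format_size(3 * 1024 ** 3) == "3 GiB"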
@@ -183,15 +183,16 @@ class CompsFilter(object):
         """
         all_groups = self.tree.xpath("/comps/group/id/text()") + lookaside_groups
         for environment in self.tree.xpath("/comps/environment"):
-            for group in environment.xpath("grouplist/groupid"):
-                if group.text not in all_groups:
-                    group.getparent().remove(group)
+            for parent_tag in ("grouplist", "optionlist"):
+                for group in environment.xpath("%s/groupid" % parent_tag):
+                    if group.text not in all_groups:
+                        group.getparent().remove(group)
 
-            for group in environment.xpath("grouplist/groupid[@arch]"):
-                value = group.attrib.get("arch")
-                values = [v for v in re.split(r"[, ]+", value) if v]
-                if arch not in values:
-                    group.getparent().remove(group)
+                for group in environment.xpath("%s/groupid[@arch]" % parent_tag):
+                    value = group.attrib.get("arch")
+                    values = [v for v in re.split(r"[, ]+", value) if v]
+                    if arch not in values:
+                        group.getparent().remove(group)
 
     def remove_empty_environments(self):
         """
@@ -260,20 +260,23 @@ def get_isohybrid_cmd(iso_path, arch):
     return cmd
 
 
-def get_manifest_cmd(iso_name, xorriso=False):
+def get_manifest_cmd(iso_name, xorriso=False, output_file=None):
+    if not output_file:
+        output_file = "%s.manifest" % iso_name
+
     if xorriso:
         return """xorriso -dev %s --find |
         tail -n+2 |
         tr -d "'" |
         cut -c2- |
-        sort >> %s.manifest""" % (
-            shlex_quote(iso_name),
+        sort >> %s""" % (
             shlex_quote(iso_name),
+            shlex_quote(output_file),
         )
     else:
-        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
-            shlex_quote(iso_name),
+        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s" % (
             shlex_quote(iso_name),
+            shlex_quote(output_file),
        )
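The default keeps the previous behaviour (manifest next to the ISO), while output_file redirects it; the ISO name below is illustrative:

    print(get_manifest_cmd("Fedora.iso"))
    # isoinfo -R -f -i Fedora.iso | grep -v '/TRANS.TBL$' | sort >> Fedora.iso.manifest
    print(get_manifest_cmd("Fedora.iso", output_file="/tmp/contents.txt"))
    # isoinfo -R -f -i Fedora.iso | grep -v '/TRANS.TBL$' | sort >> /tmp/contents.txt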
@@ -14,17 +14,23 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.
 
 
+import contextlib
 import os
 import re
+import socket
+import shutil
 import time
 import threading
-import contextlib
+
+import requests
 
 import koji
 from kobo.shortcuts import run, force_list
 import six
 from six.moves import configparser, shlex_quote
 import six.moves.xmlrpc_client as xmlrpclib
+from flufl.lock import Lock
+from datetime import timedelta
 
 from .. import util
 from ..arch_utils import getBaseArch
@@ -785,11 +791,10 @@ class KojiWrapper(object):
         if list_of_args is None and list_of_kwargs is None:
             raise ValueError("One of list_of_args or list_of_kwargs must be set.")
 
-        if type(list_of_args) not in [type(None), list] or type(list_of_kwargs) not in [
-            type(None),
-            list,
-        ]:
-            raise ValueError("list_of_args and list_of_kwargs must be list or None.")
+        if list_of_args is not None and not isinstance(list_of_args, list):
+            raise ValueError("list_of_args must be list or None.")
+        if list_of_kwargs is not None and not isinstance(list_of_kwargs, list):
+            raise ValueError("list_of_kwargs must be list or None.")
 
         if list_of_kwargs is None:
             list_of_kwargs = [{}] * len(list_of_args)
@@ -803,9 +808,9 @@ class KojiWrapper(object):
 
         koji_session.multicall = True
         for args, kwargs in zip(list_of_args, list_of_kwargs):
-            if type(args) != list:
+            if not isinstance(args, list):
                 args = [args]
-            if type(kwargs) != dict:
+            if not isinstance(kwargs, dict):
                 raise ValueError("Every item in list_of_kwargs must be a dict")
             koji_session_fnc(*args, **kwargs)
 
@@ -813,7 +818,7 @@ class KojiWrapper(object):
 
         if not responses:
             return None
-        if type(responses) != list:
+        if not isinstance(responses, list):
             raise ValueError(
                 "Fault element was returned for multicall of method %r: %r"
                 % (koji_session_fnc, responses)
@@ -829,7 +834,7 @@ class KojiWrapper(object):
         # a one-item array containing the result value,
         # or a struct of the form found inside the standard <fault> element.
         for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
-            if type(response) == list:
+            if isinstance(response, list):
                 if not response:
                     raise ValueError(
                         "Empty list returned for multicall of method %r with args %r, %r"  # noqa: E501
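Note that the switch from exact type() comparisons to isinstance() is not purely cosmetic: isinstance also accepts subclasses, which the old checks rejected. A minimal standalone illustration:

    class ArgList(list):
        # A list subclass, e.g. something produced by a helper library.
        pass

    args = ArgList([1, 2])
    print(type(args) == list)      # False -> the old check would reject this
    print(isinstance(args, list))  # True  -> the new check accepts it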
@@ -895,3 +900,176 @@ def get_buildroot_rpms(compose, task_id):
             continue
         result.append(i)
     return sorted(result)
+
+
+class KojiDownloadProxy:
+    def __init__(self, topdir, topurl, cache_dir, logger):
+        if not topdir:
+            # This will only happen if there is either no koji_profile
+            # configured, or the profile doesn't have a topdir. In the first
+            # case there will be no koji interaction, and the second indicates
+            # broken koji configuration.
+            # We can pretend to have local access in both cases to avoid any
+            # external requests.
+            self.has_local_access = True
+            return
+
+        self.cache_dir = cache_dir
+        self.logger = logger
+
+        self.topdir = topdir
+        self.topurl = topurl
+
+        # If cache directory is configured, we want to use it (even if we
+        # actually have local access to the storage).
+        self.has_local_access = not bool(cache_dir)
+        # This is used for temporary downloaded files. The suffix is unique
+        # per-process. To prevent threads in the same process from colliding, a
+        # thread id is added later.
+        self.unique_suffix = "%s.%s" % (socket.gethostname(), os.getpid())
+        self.session = None
+        if not self.has_local_access:
+            self.session = requests.Session()
+
+    @property
+    def path_prefix(self):
+        dir = self.topdir if self.has_local_access else self.cache_dir
+        return dir.rstrip("/") + "/"
+
+    @classmethod
+    def from_config(klass, conf, logger):
+        topdir = None
+        topurl = None
+        cache_dir = None
+        if "koji_profile" in conf:
+            koji_module = koji.get_profile_module(conf["koji_profile"])
+            topdir = koji_module.config.topdir
+            topurl = koji_module.config.topurl
+
+            cache_dir = conf.get("koji_cache")
+            if cache_dir:
+                cache_dir = cache_dir.rstrip("/") + "/"
+        return klass(topdir, topurl, cache_dir, logger)
+
+    @util.retry(wait_on=requests.exceptions.RequestException)
+    def _download(self, url, dest):
+        """Download file into given location
+
+        :param str url: URL of the file to download
+        :param str dest: file path to store the result in
+        :returns: path to the downloaded file (same as dest) or None if the URL
+           returned 404.
+        """
+        with self.session.get(url, stream=True) as r:
+            if r.status_code == 404:
+                self.logger.warning("GET %s NOT FOUND", url)
+                return None
+            if r.status_code != 200:
+                self.logger.error("GET %s %s", url, r.status_code)
+                r.raise_for_status()
+            # The exception from here will be retried by the decorator.
+
+            file_size = int(r.headers.get("Content-Length", 0))
+            self.logger.info("GET %s OK %s", url, util.format_size(file_size))
+            with open(dest, "wb") as f:
+                shutil.copyfileobj(r.raw, f)
+            return dest
+
+    def _delete(self, path):
+        """Try to delete file at given path and ignore errors."""
+        try:
+            os.remove(path)
+        except Exception:
+            self.logger.warning("Failed to delete %s", path)
+
+    def _atomic_download(self, url, dest, validator):
+        """Atomically download a file
+
+        :param str url: URL of the file to download
+        :param str dest: file path to store the result in
+        :returns: path to the downloaded file (same as dest) or None if the URL
+           returned 404.
+        """
+        temp_file = "%s.%s.%s" % (dest, self.unique_suffix, threading.get_ident())
+
+        # First download to the temporary location.
+        try:
+            if self._download(url, temp_file) is None:
+                # The file was not found.
+                return None
+        except Exception:
+            # Download failed, let's make sure to clean up potentially partial
+            # temporary file.
+            self._delete(temp_file)
+            raise
+
+        # Check if the temporary file is correct (assuming we were provided a
+        # validator function).
+        try:
+            if validator:
+                validator(temp_file)
+        except Exception:
+            # Validation failed. Let's delete the problematic file and re-raise
+            # the exception.
+            self._delete(temp_file)
+            raise
+
+        # Atomically move the temporary file into final location
+        os.rename(temp_file, dest)
+        return dest
+
+    def _download_file(self, path, validator):
+        """Ensure file on Koji volume in ``path`` is present in the local
+        cache.
+
+        :returns: path to the local file or None if file is not found
+        """
+        url = path.replace(self.topdir, self.topurl)
+        destination_file = path.replace(self.topdir, self.cache_dir)
+        util.makedirs(os.path.dirname(destination_file))
+
+        lock = Lock(destination_file + ".lock")
+        # Hold the lock for this file for 5 minutes. If another compose needs
+        # the same file but it's not downloaded yet, the process will wait.
+        #
+        # If the download finishes in time, the downloaded file will be used
+        # here.
+        #
+        # If the download takes longer, this process will steal the lock and
+        # start its own download.
+        #
+        # That should not be a problem: the same file will be downloaded and
+        # then replaced atomically on the filesystem. If the original process
+        # managed to hardlink the first file already, that hardlink will be
+        # broken, but that will only result in the same file stored twice.
+        lock.lifetime = timedelta(minutes=5)
+
+        with lock:
+            # Check if the file already exists. If yes, return the path.
+            if os.path.exists(destination_file):
+                # Update mtime of the file. This covers the case of packages in the
+                # tag that are not included in the compose. Updating mtime will
+                # exempt them from cleanup for extra time.
+                os.utime(destination_file)
+                return destination_file
+
+            return self._atomic_download(url, destination_file, validator)
+
+    def get_file(self, path, validator=None):
+        """
+        If path refers to an existing file in Koji, return a valid local path
+        to it. If no such file exists, return None.
+
+        :param validator: A callable that will be called with the path to the
+            downloaded file if and only if the file was actually downloaded.
+            Any exception raised from there will abort the download and be
+            propagated.
+        """
+        if self.has_local_access:
+            # We have koji volume mounted locally. No transformation needed for
+            # the path, just check it exists.
+            if os.path.exists(path):
+                return path
+            return None
+        else:
+            # We need to download the file.
+            return self._download_file(path, validator)
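A sketch of wiring the proxy up; the profile name and paths are illustrative assumptions, only the config keys and methods come from the class above:

    import logging

    conf = {"koji_profile": "koji", "koji_cache": "/var/cache/pungi/koji"}
    proxy = KojiDownloadProxy.from_config(conf, logging.getLogger("pungi"))

    # Paths are given as they appear under Koji's topdir; with koji_cache set,
    # the file is fetched over HTTP and a path under the cache is returned.
    # Without koji_cache, has_local_access is true and the path is used as-is.
    local = proxy.get_file("/mnt/koji/packages/bash/5.2/1.fc39/x86_64/bash-5.2-1.fc39.x86_64.rpm")
    if local is None:
        print("not present in Koji")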
@@ -109,55 +109,3 @@ class LoraxWrapper(object):
         # TODO: workdir
 
         return cmd
-
-    def get_buildinstall_cmd(
-        self,
-        product,
-        version,
-        release,
-        repo_baseurl,
-        output_dir,
-        variant=None,
-        bugurl=None,
-        nomacboot=False,
-        noupgrade=False,
-        is_final=False,
-        buildarch=None,
-        volid=None,
-        brand=None,
-    ):
-        # RHEL 6 compatibility
-        # Usage: buildinstall [--debug] --version <version> --brand <brand> --product <product> --release <comment> --final [--output outputdir] [--discs <discstring>] <root>  # noqa: E501
-
-        brand = brand or "redhat"
-        # HACK: ignore provided release
-        release = "%s %s" % (brand, version)
-        bugurl = bugurl or "https://bugzilla.redhat.com"
-
-        cmd = ["/usr/lib/anaconda-runtime/buildinstall"]
-
-        cmd.append("--debug")
-
-        cmd.extend(["--version", version])
-        cmd.extend(["--brand", brand])
-        cmd.extend(["--product", product])
-        cmd.extend(["--release", release])
-
-        if is_final:
-            cmd.append("--final")
-
-        if buildarch:
-            cmd.extend(["--buildarch", buildarch])
-
-        if bugurl:
-            cmd.extend(["--bugurl", bugurl])
-
-        output_dir = os.path.abspath(output_dir)
-        cmd.extend(["--output", output_dir])
-
-        for i in force_list(repo_baseurl):
-            if "://" not in i:
-                i = "file://%s" % os.path.abspath(i)
-            cmd.append(i)
-
-        return cmd
@@ -20,6 +20,7 @@ import os
 import shutil
 import glob
 import six
+import threading
 from six.moves import shlex_quote
 from six.moves.urllib.request import urlretrieve
 from fnmatch import fnmatch
@@ -29,12 +30,15 @@ from kobo.shortcuts import run, force_list
 from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
 from .kojiwrapper import KojiWrapper
 
+lock = threading.Lock()
+
+
 class ScmBase(kobo.log.LoggingBase):
-    def __init__(self, logger=None, command=None, compose=None):
+    def __init__(self, logger=None, command=None, compose=None, options=None):
         kobo.log.LoggingBase.__init__(self, logger=logger)
         self.command = command
         self.compose = compose
+        self.options = options or {}
 
     @retry(interval=60, timeout=300, wait_on=RuntimeError)
     def retry_run(self, cmd, **kwargs):
@@ -156,22 +160,31 @@ class GitWrapper(ScmBase):
         if "://" not in repo:
             repo = "file://%s" % repo
 
+        git_cmd = ["git"]
+        if "credential_helper" in self.options:
+            git_cmd.extend(["-c", "credential.useHttpPath=true"])
+            git_cmd.extend(
+                ["-c", "credential.helper=%s" % self.options["credential_helper"]]
+            )
+
         run(["git", "init"], workdir=destdir)
         try:
-            run(["git", "fetch", "--depth=1", repo, branch], workdir=destdir)
+            run(git_cmd + ["fetch", "--depth=1", repo, branch], workdir=destdir)
             run(["git", "checkout", "FETCH_HEAD"], workdir=destdir)
         except RuntimeError as e:
             # Fetch failed, to do a full clone we add a remote to our empty
             # repo, get its content and check out the reference we want.
             self.log_debug(
                 "Trying to do a full clone because shallow clone failed: %s %s"
-                % (e, e.output)
+                % (e, getattr(e, "output", ""))
             )
             try:
                 # Re-run git init in case of previous failure breaking .git dir
                 run(["git", "init"], workdir=destdir)
                 run(["git", "remote", "add", "origin", repo], workdir=destdir)
-                self.retry_run(["git", "remote", "update", "origin"], workdir=destdir)
+                self.retry_run(
+                    git_cmd + ["remote", "update", "origin"], workdir=destdir
+                )
                 run(["git", "checkout", branch], workdir=destdir)
             except RuntimeError:
                 if self.compose:
@@ -185,19 +198,38 @@ class GitWrapper(ScmBase):
                     copy_all(destdir, debugdir)
                 raise
 
-        self.run_process_command(destdir)
+    def get_temp_repo_path(self, scm_root, scm_branch):
+        scm_repo = scm_root.split("/")[-1]
+        process_id = os.getpid()
+        tmp_dir = (
+            "/tmp/pungi-temp-git-repos-"
+            + str(process_id)
+            + "/"
+            + scm_repo
+            + "-"
+            + scm_branch
+        )
+        return tmp_dir
+
+    def setup_repo(self, scm_root, scm_branch):
+        tmp_dir = self.get_temp_repo_path(scm_root, scm_branch)
+        if not os.path.isdir(tmp_dir):
+            makedirs(tmp_dir)
+            self._clone(scm_root, scm_branch, tmp_dir)
+            self.run_process_command(tmp_dir)
+        return tmp_dir
 
     def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
         scm_dir = scm_dir.lstrip("/")
         scm_branch = scm_branch or "master"
 
-        with temp_dir() as tmp_dir:
-            self.log_debug(
-                "Exporting directory %s from git %s (branch %s)..."
-                % (scm_dir, scm_root, scm_branch)
-            )
+        self.log_debug(
+            "Exporting directory %s from git %s (branch %s)..."
+            % (scm_dir, scm_root, scm_branch)
+        )
 
-            self._clone(scm_root, scm_branch, tmp_dir)
+        with lock:
+            tmp_dir = self.setup_repo(scm_root, scm_branch)
 
-            copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
+        copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
 
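The temp path scheme keys the checkout by process id, repo basename and branch, so repeated exports within one compose reuse a single clone, and the pungi-koji finally-block shown earlier can remove the whole per-process tree at exit. A standalone sketch of the same path logic:

    import os

    def get_temp_repo_path(scm_root, scm_branch):
        scm_repo = scm_root.split("/")[-1]
        return "/tmp/pungi-temp-git-repos-%s/%s-%s" % (
            os.getpid(), scm_repo, scm_branch
        )

    print(get_temp_repo_path("https://git.example.com/pungi.git", "main"))
    # e.g. /tmp/pungi-temp-git-repos-12345/pungi.git-main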
@@ -205,15 +237,15 @@ class GitWrapper(ScmBase):
         scm_file = scm_file.lstrip("/")
         scm_branch = scm_branch or "master"
 
-        with temp_dir() as tmp_dir:
-            target_path = os.path.join(target_dir, os.path.basename(scm_file))
+        target_path = os.path.join(target_dir, os.path.basename(scm_file))
 
-            self.log_debug(
-                "Exporting file %s from git %s (branch %s)..."
-                % (scm_file, scm_root, scm_branch)
-            )
+        self.log_debug(
+            "Exporting file %s from git %s (branch %s)..."
+            % (scm_file, scm_root, scm_branch)
+        )
 
-            self._clone(scm_root, scm_branch, tmp_dir)
+        with lock:
+            tmp_dir = self.setup_repo(scm_root, scm_branch)
 
         makedirs(target_dir)
         shutil.copy2(os.path.join(tmp_dir, scm_file), target_path)
@@ -361,15 +393,19 @@ def get_file_from_scm(scm_dict, target_path, compose=None):
         scm_file = os.path.abspath(scm_dict)
         scm_branch = None
         command = None
+        options = {}
     else:
         scm_type = scm_dict["scm"]
         scm_repo = scm_dict["repo"]
         scm_file = scm_dict["file"]
         scm_branch = scm_dict.get("branch", None)
         command = scm_dict.get("command")
+        options = scm_dict.get("options", {})
 
     logger = compose._logger if compose else None
-    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)
+    scm = _get_wrapper(
+        scm_type, logger=logger, command=command, compose=compose, options=options
+    )
 
     files_copied = []
     for i in force_list(scm_file):
@@ -450,15 +486,19 @@ def get_dir_from_scm(scm_dict, target_path, compose=None):
         scm_dir = os.path.abspath(scm_dict)
         scm_branch = None
         command = None
+        options = {}
     else:
         scm_type = scm_dict["scm"]
         scm_repo = scm_dict.get("repo", None)
         scm_dir = scm_dict["dir"]
         scm_branch = scm_dict.get("branch", None)
         command = scm_dict.get("command")
+        options = scm_dict.get("options", {})
 
     logger = compose._logger if compose else None
-    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)
+    scm = _get_wrapper(
+        scm_type, logger=logger, command=command, compose=compose, options=options
+    )
 
     with temp_dir(prefix="scm_checkout_") as tmp_dir:
         scm.export_dir(scm_repo, scm_dir, scm_branch=scm_branch, target_dir=tmp_dir)
@@ -276,7 +276,6 @@ class Variant(object):
         modules=None,
         modular_koji_tags=None,
     ):
-
         environments = environments or []
         buildinstallpackages = buildinstallpackages or []
 
@@ -1,705 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import print_function
-
-import argparse
-import atexit
-import errno
-import json
-import logging
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-import threading
-from collections import namedtuple
-
-import kobo.conf
-import kobo.log
-import productmd
-from kobo import shortcuts
-from six.moves import configparser, shlex_quote
-
-import pungi.util
-from pungi.compose import get_compose_dir
-from pungi.linker import linker_pool
-from pungi.phases.pkgset.sources.source_koji import get_koji_event_raw
-from pungi.util import find_old_compose, parse_koji_event, temp_dir
-from pungi.wrappers.kojiwrapper import KojiWrapper
-
-
-Config = namedtuple(
-    "Config",
-    [
-        # Path to directory with the compose
-        "target",
-        "compose_type",
-        "label",
-        # Path to the selected old compose that will be reused
-        "old_compose",
-        # Path to directory with config file copies
-        "config_dir",
-        # Which koji event to use (if any)
-        "event",
-        # Additional arguments to pungi-koji executable
-        "extra_args",
-    ],
-)
-
-log = logging.getLogger(__name__)
-
-
-class Status(object):
-    # Ready to start
-    READY = "READY"
-    # Waiting for dependencies to finish.
-    WAITING = "WAITING"
-    # Part is currently running
-    STARTED = "STARTED"
-    # A dependency failed, this one will never start.
-    BLOCKED = "BLOCKED"
-
-
-class ComposePart(object):
-    def __init__(self, name, config, just_phase=[], skip_phase=[], dependencies=[]):
-        self.name = name
-        self.config = config
-        self.status = Status.WAITING if dependencies else Status.READY
-        self.just_phase = just_phase
-        self.skip_phase = skip_phase
-        self.blocked_on = set(dependencies)
-        self.depends_on = set(dependencies)
-        self.path = None
-        self.log_file = None
-        self.failable = False
-
-    def __str__(self):
-        return self.name
-
-    def __repr__(self):
-        return (
-            "ComposePart({0.name!r},"
-            " {0.config!r},"
-            " {0.status!r},"
-            " just_phase={0.just_phase!r},"
-            " skip_phase={0.skip_phase!r},"
-            " dependencies={0.depends_on!r})"
-        ).format(self)
-
-    def refresh_status(self):
-        """Refresh status of this part with the result of the compose. This
-        should only be called once the compose finished.
-        """
-        try:
-            with open(os.path.join(self.path, "STATUS")) as fh:
-                self.status = fh.read().strip()
-        except IOError as exc:
-            log.error("Failed to update status of %s: %s", self.name, exc)
-            log.error("Assuming %s is DOOMED", self.name)
-            self.status = "DOOMED"
-
-    def is_finished(self):
-        return "FINISHED" in self.status
-
-    def unblock_on(self, finished_part):
-        """Update set of blockers for this part. If it's empty, mark us as ready."""
-        self.blocked_on.discard(finished_part)
-        if self.status == Status.WAITING and not self.blocked_on:
-            log.debug("%s is ready to start", self)
-            self.status = Status.READY
-
-    def setup_start(self, global_config, parts):
-        substitutions = dict(
-            ("part-%s" % name, p.path) for name, p in parts.items() if p.is_finished()
-        )
-        substitutions["configdir"] = global_config.config_dir
-
-        config = pungi.util.load_config(self.config)
-
-        for f in config.opened_files:
-            # apply substitutions
-            fill_in_config_file(f, substitutions)
-
-        self.status = Status.STARTED
-        self.path = get_compose_dir(
-            os.path.join(global_config.target, "parts"),
-            config,
-            compose_type=global_config.compose_type,
-            compose_label=global_config.label,
-        )
-        self.log_file = os.path.join(global_config.target, "logs", "%s.log" % self.name)
-        log.info("Starting %s in %s", self.name, self.path)
-
-    def get_cmd(self, global_config):
-        cmd = ["pungi-koji", "--config", self.config, "--compose-dir", self.path]
-        cmd.append("--%s" % global_config.compose_type)
-        if global_config.label:
-            cmd.extend(["--label", global_config.label])
-        for phase in self.just_phase:
-            cmd.extend(["--just-phase", phase])
-        for phase in self.skip_phase:
-            cmd.extend(["--skip-phase", phase])
-        if global_config.old_compose:
-            cmd.extend(
-                ["--old-compose", os.path.join(global_config.old_compose, "parts")]
-            )
-        if global_config.event:
-            cmd.extend(["--koji-event", str(global_config.event)])
-        if global_config.extra_args:
-            cmd.extend(global_config.extra_args)
-        cmd.extend(["--no-latest-link"])
-        return cmd
-
-    @classmethod
-    def from_config(cls, config, section, config_dir):
-        part = cls(
-            name=section,
-            config=os.path.join(config_dir, config.get(section, "config")),
-            just_phase=_safe_get_list(config, section, "just_phase", []),
-            skip_phase=_safe_get_list(config, section, "skip_phase", []),
-            dependencies=_safe_get_list(config, section, "depends_on", []),
-        )
-        if config.has_option(section, "failable"):
-            part.failable = config.getboolean(section, "failable")
-        return part
-
-
-def _safe_get_list(config, section, option, default=None):
-    """Get a value from config parser. The result is split into a list on
-    commas or spaces, and `default` is returned if the key does not exist.
-    """
-    if config.has_option(section, option):
-        value = config.get(section, option)
-        return [x.strip() for x in re.split(r"[, ]+", value) if x]
-    return default
-
-
-def fill_in_config_file(fp, substs):
-    """Templating function. It works with Jinja2 style placeholders such as
-    {{foo}}. Whitespace around the key name is fine. The file is modified in place.
-
-    :param fp string: path to the file to process
-    :param substs dict: a mapping for values to put into the file
-    """
-
-    def repl(match):
-        try:
-            return substs[match.group(1)]
-        except KeyError as exc:
-            raise RuntimeError(
-                "Unknown placeholder %s in %s" % (exc, os.path.basename(fp))
-            )
-
-    with open(fp, "r") as f:
-        contents = re.sub(r"{{ *([a-zA-Z-_]+) *}}", repl, f.read())
-    with open(fp, "w") as f:
-        f.write(contents)
-
-
-def start_part(global_config, parts, part):
-    part.setup_start(global_config, parts)
-    fh = open(part.log_file, "w")
-    cmd = part.get_cmd(global_config)
-    log.debug("Running command %r", " ".join(shlex_quote(x) for x in cmd))
-    return subprocess.Popen(cmd, stdout=fh, stderr=subprocess.STDOUT)
-
-
-def handle_finished(global_config, linker, parts, proc, finished_part):
-    finished_part.refresh_status()
-    log.info("%s finished with status %s", finished_part, finished_part.status)
-    if proc.returncode == 0:
-        # Success, unblock other parts...
-        for part in parts.values():
-            part.unblock_on(finished_part.name)
-        # ...and link the results into final destination.
-        copy_part(global_config, linker, finished_part)
-        update_metadata(global_config, finished_part)
-    else:
-        # Failure, other stuff may be blocked.
-        log.info("See details in %s", finished_part.log_file)
-        block_on(parts, finished_part.name)
-
-
-def copy_part(global_config, linker, part):
-    c = productmd.Compose(part.path)
-    for variant in c.info.variants:
-        data_path = os.path.join(part.path, "compose", variant)
-        link = os.path.join(global_config.target, "compose", variant)
-        log.info("Hardlinking content %s -> %s", data_path, link)
-        hardlink_dir(linker, data_path, link)
-
-
-def hardlink_dir(linker, srcdir, dstdir):
-    for root, dirs, files in os.walk(srcdir):
-        root = os.path.relpath(root, srcdir)
-        for f in files:
-            src = os.path.normpath(os.path.join(srcdir, root, f))
-            dst = os.path.normpath(os.path.join(dstdir, root, f))
-            linker.queue_put((src, dst))
-
-
-def update_metadata(global_config, part):
-    part_metadata_dir = os.path.join(part.path, "compose", "metadata")
-    final_metadata_dir = os.path.join(global_config.target, "compose", "metadata")
-    for f in os.listdir(part_metadata_dir):
-        # Load the metadata
-        with open(os.path.join(part_metadata_dir, f)) as fh:
-            part_metadata = json.load(fh)
-        final_metadata = os.path.join(final_metadata_dir, f)
-        if os.path.exists(final_metadata):
-            # We already have this file, will need to merge.
-            merge_metadata(final_metadata, part_metadata)
-        else:
-            # A new file, just copy it.
-            copy_metadata(global_config, final_metadata, part_metadata)
-
-
-def copy_metadata(global_config, final_metadata, source):
-    """Copy file to final location, but update compose information."""
-    with open(
-        os.path.join(global_config.target, "compose/metadata/composeinfo.json")
-    ) as f:
-        composeinfo = json.load(f)
-    try:
-        source["payload"]["compose"].update(composeinfo["payload"]["compose"])
-    except KeyError:
-        # No [payload][compose], probably OSBS metadata
-        pass
-    with open(final_metadata, "w") as f:
-        json.dump(source, f, indent=2, sort_keys=True)
-
-
-def merge_metadata(final_metadata, source):
-    with open(final_metadata) as f:
-        metadata = json.load(f)
-
-    try:
-        key = {
-            "productmd.composeinfo": "variants",
-            "productmd.modules": "modules",
-            "productmd.images": "images",
-            "productmd.rpms": "rpms",
-        }[source["header"]["type"]]
-        # TODO what if multiple parts create images for the same variant
-        metadata["payload"][key].update(source["payload"][key])
-    except KeyError:
-        # OSBS metadata, merge whole file
-        metadata.update(source)
-    with open(final_metadata, "w") as f:
-        json.dump(metadata, f, indent=2, sort_keys=True)
-
-
-def block_on(parts, name):
-    """Part ``name`` failed, mark everything depending on it as blocked."""
-    for part in parts.values():
-        if name in part.blocked_on:
-            log.warning("%s is blocked now and will not run", part)
-            part.status = Status.BLOCKED
-            block_on(parts, part.name)
-
-
-def check_finished_processes(processes):
-    """Walk through all active processes and check if something finished."""
-    for proc in processes.keys():
-        proc.poll()
-        if proc.returncode is not None:
-            yield proc, processes[proc]
-
-
-def run_all(global_config, parts):
-    # Mapping subprocess.Popen -> ComposePart
-    processes = dict()
-    remaining = set(p.name for p in parts.values() if not p.is_finished())
-
-    with linker_pool("hardlink") as linker:
-        while remaining or processes:
-            update_status(global_config, parts)
-
-            for proc, part in check_finished_processes(processes):
-                del processes[proc]
-                handle_finished(global_config, linker, parts, proc, part)
-
-            # Start new available processes.
-            for name in list(remaining):
-                part = parts[name]
-                # Start all ready parts
-                if part.status == Status.READY:
-                    remaining.remove(name)
-                    processes[start_part(global_config, parts, part)] = part
-                # Remove blocked parts from todo list
-                elif part.status == Status.BLOCKED:
-                    remaining.remove(part.name)
-
-            # Wait for any child process to finish if there is any.
-            if processes:
-                pid, reason = os.wait()
-                for proc in processes.keys():
-                    # Set the return code for process that we caught by os.wait().
-                    # Calling poll() on it would not set the return code properly
-                    # since the value was already consumed by os.wait().
-                    if proc.pid == pid:
-                        proc.returncode = (reason >> 8) & 0xFF
-
-        log.info("Waiting for linking to finish...")
-    return update_status(global_config, parts)
-
-
-def get_target_dir(config, compose_info, label, reldir=""):
-    """Find directory where this compose will be.
-
-    @param reldir: if target path in config is relative, it will be resolved
-                   against this directory
-    """
-    dir = os.path.realpath(os.path.join(reldir, config.get("general", "target")))
-    target_dir = get_compose_dir(
-        dir,
-        compose_info,
-        compose_type=config.get("general", "compose_type"),
-        compose_label=label,
-    )
-    return target_dir
-
-
-def setup_logging(debug=False):
-    FORMAT = "%(asctime)s: %(levelname)s: %(message)s"
-    level = logging.DEBUG if debug else logging.INFO
-    kobo.log.add_stderr_logger(log, log_level=level, format=FORMAT)
-    log.setLevel(level)
-
-
-def compute_status(statuses):
-    if any(map(lambda x: x[0] in ("STARTED", "WAITING"), statuses)):
-        # If there is anything still running or waiting to start, the whole is
-        # still running.
-        return "STARTED"
-    elif any(map(lambda x: x[0] in ("DOOMED", "BLOCKED") and not x[1], statuses)):
-        # If any required part is doomed or blocked, the whole is doomed
-        return "DOOMED"
-    elif all(map(lambda x: x[0] == "FINISHED", statuses)):
-        # If all parts are complete, the whole is complete
-        return "FINISHED"
-    else:
-        return "FINISHED_INCOMPLETE"
-
-
-def update_status(global_config, parts):
-    log.debug("Updating status metadata")
-    metadata = {}
-    statuses = set()
-    for part in parts.values():
-        metadata[part.name] = {"status": part.status, "path": part.path}
-        statuses.add((part.status, part.failable))
-    metadata_path = os.path.join(
-        global_config.target, "compose", "metadata", "parts.json"
-    )
-    with open(metadata_path, "w") as fh:
-        json.dump(metadata, fh, indent=2, sort_keys=True, separators=(",", ": "))
-
-    status = compute_status(statuses)
-    log.info("Overall status is %s", status)
-    with open(os.path.join(global_config.target, "STATUS"), "w") as fh:
-        fh.write(status)
-
-    return status != "DOOMED"
-
-
-def prepare_compose_dir(config, args, main_config_file, compose_info):
-    if not hasattr(args, "compose_path"):
-        # Creating a brand new compose
-        target_dir = get_target_dir(
-            config, compose_info, args.label, reldir=os.path.dirname(main_config_file)
-        )
-        for dir in ("logs", "parts", "compose/metadata", "work/global"):
-            try:
-                os.makedirs(os.path.join(target_dir, dir))
-            except OSError as exc:
-                if exc.errno != errno.EEXIST:
-                    raise
-        with open(os.path.join(target_dir, "STATUS"), "w") as fh:
-            fh.write("STARTED")
-        # Copy initial composeinfo for new compose
-        shutil.copy(
-            os.path.join(target_dir, "work/global/composeinfo-base.json"),
-            os.path.join(target_dir, "compose/metadata/composeinfo.json"),
-        )
-    else:
-        # Restarting a particular compose
-        target_dir = args.compose_path
-
-    return target_dir
-
-
-def load_parts_metadata(global_config):
-    parts_metadata = os.path.join(global_config.target, "compose/metadata/parts.json")
-    with open(parts_metadata) as f:
-        return json.load(f)
-
-
-def setup_for_restart(global_config, parts, to_restart):
-    has_stuff_to_do = False
-    metadata = load_parts_metadata(global_config)
-    for key in metadata:
-        # Update state to match what is on disk
-        log.debug(
-            "Reusing %s (%s) from %s",
-            key,
-            metadata[key]["status"],
-            metadata[key]["path"],
-        )
-        parts[key].status = metadata[key]["status"]
-        parts[key].path = metadata[key]["path"]
-    for key in to_restart:
-        # Set restarted parts to run again
-        parts[key].status = Status.WAITING
-        parts[key].path = None
-
-    for key in to_restart:
-        # Remove blockers that are already finished
-        for blocker in list(parts[key].blocked_on):
-            if parts[blocker].is_finished():
-                parts[key].blocked_on.discard(blocker)
-        if not parts[key].blocked_on:
-            log.debug("Part %s in not blocked", key)
-            # Nothing blocks it; let's go
-            parts[key].status = Status.READY
-            has_stuff_to_do = True
-
-    if not has_stuff_to_do:
-        raise RuntimeError("All restarted parts are blocked. Nothing to do.")
-
-
-def run_kinit(config):
-    if not config.getboolean("general", "kerberos"):
-        return
-
-    keytab = config.get("general", "kerberos_keytab")
-    principal = config.get("general", "kerberos_principal")
-
-    fd, fname = tempfile.mkstemp(prefix="krb5cc_pungi-orchestrate_")
-    os.close(fd)
-    os.environ["KRB5CCNAME"] = fname
-    shortcuts.run(["kinit", "-k", "-t", keytab, principal])
-    log.debug("Created a kerberos ticket for %s", principal)
-
-    atexit.register(os.remove, fname)
-
-
-def get_compose_data(compose_path):
-    try:
-        compose = productmd.compose.Compose(compose_path)
-        data = {
-            "compose_id": compose.info.compose.id,
-            "compose_date": compose.info.compose.date,
-            "compose_type": compose.info.compose.type,
-            "compose_respin": str(compose.info.compose.respin),
-            "compose_label": compose.info.compose.label,
-            "release_id": compose.info.release_id,
-            "release_name": compose.info.release.name,
-            "release_short": compose.info.release.short,
-            "release_version": compose.info.release.version,
-            "release_type": compose.info.release.type,
-            "release_is_layered": compose.info.release.is_layered,
-        }
-        if compose.info.release.is_layered:
-            data.update(
-                {
-                    "base_product_name": compose.info.base_product.name,
-                    "base_product_short": compose.info.base_product.short,
-                    "base_product_version": compose.info.base_product.version,
-                    "base_product_type": compose.info.base_product.type,
-                }
-            )
-        return data
-    except Exception:
-        return {}
-
-
-def get_script_env(compose_path):
-    env = os.environ.copy()
-    env["COMPOSE_PATH"] = compose_path
-    for key, value in get_compose_data(compose_path).items():
-        if isinstance(value, bool):
-            env[key.upper()] = "YES" if value else ""
-        else:
-            env[key.upper()] = str(value) if value else ""
-    return env
-
-
-def run_scripts(prefix, compose_dir, scripts):
-    env = get_script_env(compose_dir)
-    for idx, script in enumerate(scripts.strip().splitlines()):
-        command = script.strip()
-        logfile = os.path.join(compose_dir, "logs", "%s%s.log" % (prefix, idx))
-        log.debug("Running command: %r", command)
-        log.debug("See output in %s", logfile)
-        shortcuts.run(command, env=env, logfile=logfile)
-
-
-def try_translate_path(parts, path):
-    translation = []
-    for part in parts.values():
-        conf = pungi.util.load_config(part.config)
-        translation.extend(conf.get("translate_paths", []))
-    return pungi.util.translate_path_raw(translation, path)
-
-
-def send_notification(compose_dir, command, parts):
-    if not command:
-        return
-    from pungi.notifier import PungiNotifier
-
-    data = get_compose_data(compose_dir)
-    data["location"] = try_translate_path(parts, compose_dir)
-    notifier = PungiNotifier([command])
-    with open(os.path.join(compose_dir, "STATUS")) as f:
-        status = f.read().strip()
-    notifier.send("status-change", workdir=compose_dir, status=status, **data)
-
-
-def setup_progress_monitor(global_config, parts):
-    """Update configuration so that each part send notifications about its
-    progress to the orchestrator.
-
-    There is a file to which the notification is written. The orchestrator is
-    reading it and mapping the entries to particular parts. The path to this
-    file is stored in an environment variable.
-    """
-    tmp_file = tempfile.NamedTemporaryFile(prefix="pungi-progress-monitor_")
-    os.environ["_PUNGI_ORCHESTRATOR_PROGRESS_MONITOR"] = tmp_file.name
-    atexit.register(os.remove, tmp_file.name)
-
-    global_config.extra_args.append(
-        "--notification-script=pungi-notification-report-progress"
-    )
-
-    def reader():
-        while True:
-            line = tmp_file.readline()
-            if not line:
-                time.sleep(0.1)
-                continue
-            path, msg = line.split(":", 1)
-            for part in parts:
-                if parts[part].path == os.path.dirname(path):
-                    log.debug("%s: %s", part, msg.strip())
-                    break
-
-    monitor = threading.Thread(target=reader)
-    monitor.daemon = True
-    monitor.start()
-
-
-def run(work_dir, main_config_file, args):
-    config_dir = os.path.join(work_dir, "config")
-    shutil.copytree(os.path.dirname(main_config_file), config_dir)
-
-    # Read main config
-    parser = configparser.RawConfigParser(
-        defaults={
-            "kerberos": "false",
-            "pre_compose_script": "",
-            "post_compose_script": "",
-            "notification_script": "",
-        }
-    )
-    parser.read(main_config_file)
-
-    # Create kerberos ticket
-    run_kinit(parser)
-
-    compose_info = dict(parser.items("general"))
-    compose_type = parser.get("general", "compose_type")
-
-    target_dir = prepare_compose_dir(parser, args, main_config_file, compose_info)
-    kobo.log.add_file_logger(log, os.path.join(target_dir, "logs", "orchestrator.log"))
-    log.info("Composing %s", target_dir)
-
-    run_scripts("pre_compose_", target_dir, parser.get("general", "pre_compose_script"))
-
-    old_compose = find_old_compose(
-        os.path.dirname(target_dir),
-        compose_info["release_short"],
-        compose_info["release_version"],
-        "",
-    )
-    if old_compose:
-        log.info("Reusing old compose %s", old_compose)
-
-    global_config = Config(
-        target=target_dir,
-        compose_type=compose_type,
-        label=args.label,
-        old_compose=old_compose,
-        config_dir=os.path.dirname(main_config_file),
-        event=args.koji_event,
-        extra_args=_safe_get_list(parser, "general", "extra_args"),
-    )
-
-    if not global_config.event and parser.has_option("general", "koji_profile"):
-        koji_wrapper = KojiWrapper(parser.get("general", "koji_profile"))
-        event_file = os.path.join(global_config.target, "work/global/koji-event")
-        result = get_koji_event_raw(koji_wrapper, None, event_file)
-        global_config = global_config._replace(event=result["id"])
-
-    parts = {}
-    for section in parser.sections():
-        if section == "general":
-            continue
-        parts[section] = ComposePart.from_config(parser, section, config_dir)
-
-    if hasattr(args, "part"):
-        setup_for_restart(global_config, parts, args.part)
-
-    setup_progress_monitor(global_config, parts)
-
-    send_notification(target_dir, parser.get("general", "notification_script"), parts)
-
-    retcode = run_all(global_config, parts)
-
-    if retcode:
-        # Only run the script if we are not doomed.
-        run_scripts(
-            "post_compose_", target_dir, parser.get("general", "post_compose_script")
-        )
-
-    send_notification(target_dir, parser.get("general", "notification_script"), parts)
-
-    return retcode
-
-
-def parse_args(argv):
-    parser = argparse.ArgumentParser()
-    parser.add_argument("--debug", action="store_true")
-    parser.add_argument("--koji-event", metavar="ID", type=parse_koji_event)
-    subparsers = parser.add_subparsers()
-    start = subparsers.add_parser("start")
-    start.add_argument("config", metavar="CONFIG")
-    start.add_argument("--label")
-
-    restart = subparsers.add_parser("restart")
-    restart.add_argument("config", metavar="CONFIG")
-    restart.add_argument("compose_path", metavar="COMPOSE_PATH")
-    restart.add_argument(
-        "part", metavar="PART", nargs="*", help="which parts to restart"
-    )
-    restart.add_argument("--label")
-
-    return parser.parse_args(argv)
-
-
-def main(argv=None):
-    args = parse_args(argv)
-    setup_logging(args.debug)
-
-    main_config_file = os.path.abspath(args.config)
-
-    with temp_dir() as work_dir:
-        try:
-            if not run(work_dir, main_config_file, args):
-                sys.exit(1)
-        except Exception:
-            log.exception("Unhandled exception!")
-            sys.exit(1)
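Note: The file deleted above implemented the `pungi-orchestrate` command: it read an INI config with one section per compose "part", ran `pungi-koji` for each part as its dependencies finished, and hardlinked the per-part results into one combined compose. Part configs could reference finished parts through `{{part-NAME}}` placeholders resolved by `fill_in_config_file`. A minimal, standalone sketch of that substitution step (names here are illustrative):

    import re

    def fill_in(contents, substs):
        # Mirrors the removed fill_in_config_file(): replace "{{ key }}"
        # tokens, failing fast on unknown keys so config typos surface early.
        def repl(match):
            try:
                return substs[match.group(1)]
            except KeyError as exc:
                raise RuntimeError("Unknown placeholder %s" % exc)

        return re.sub(r"{{ *([a-zA-Z-_]+) *}}", repl, contents)

    print(fill_in("repo = {{ part-base }}/compose", {"part-base": "/mnt/base"}))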
@@ -1,7 +1,8 @@
 # Some packages must be installed via dnf/yum first, see doc/contributing.rst
 dict.sorted
 dogpile.cache
-fedmsg
+flufl.lock ; python_version >= '3.0'
+flufl.lock < 3.0 ; python_version <= '2.7'
 funcsigs
 jsonschema
 kobo
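Note: The fedmsg dependency is dropped and flufl.lock is pinned per interpreter using PEP 508 environment markers, so one requirements file serves both Python 2 and Python 3. To check how such a marker evaluates on a given interpreter (a sketch using the `packaging` library, not part of this change):

    from packaging.markers import Marker

    # True when run under Python 3, False under Python 2.
    print(Marker("python_version >= '3.0'").evaluate())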
setup.py (10 changes)
@@ -5,14 +5,9 @@
 import os
 import glob
 
-import distutils.command.sdist
 from setuptools import setup
 
 
-# override default tarball format with bzip2
-distutils.command.sdist.sdist.default_format = {"posix": "bztar"}
-
-
 # recursively scan for python modules to be included
 package_root_dirs = ["pungi", "pungi_utils"]
 packages = set()
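Note: distutils is deprecated (PEP 632) and removed in Python 3.12, so the bzip2 sdist override had to go; the sdist format falls back to the platform default (gztar on POSIX). If a bztar archive is still wanted, it can be requested explicitly at build time (a sketch, not part of the change):

    import subprocess

    # One-off request for a bzip2 tarball without patching distutils.
    subprocess.run(["python", "setup.py", "sdist", "--formats=bztar"], check=True)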
@@ -25,7 +20,7 @@ packages = sorted(packages)
 
 setup(
     name="pungi",
-    version="4.3.6",
+    version="4.5.1",
     description="Distribution compose tool",
     url="https://pagure.io/pungi",
     author="Dennis Gilmore",
@@ -41,16 +36,17 @@ setup(
             "pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
             "pungi-make-ostree = pungi.ostree:main",
             "pungi-notification-report-progress = pungi.scripts.report_progress:main",
-            "pungi-orchestrate = pungi_utils.orchestrator:main",
             "pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main",  # noqa: E501
             "pungi-koji = pungi.scripts.pungi_koji:cli_main",
             "pungi-gather = pungi.scripts.pungi_gather:cli_main",
             "pungi-config-dump = pungi.scripts.config_dump:cli_main",
             "pungi-config-validate = pungi.scripts.config_validate:cli_main",
+            "pungi-cache-cleanup = pungi.scripts.cache_cleanup:main",
         ]
     },
     scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving"],
     data_files=[
+        ("/usr/lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
         ("/usr/share/pungi", glob.glob("share/*.xsl")),
         ("/usr/share/pungi", glob.glob("share/*.ks")),
         ("/usr/share/pungi", glob.glob("share/*.dtd")),
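Note: For the new `pungi-cache-cleanup` entry point, setuptools generates a launcher roughly equivalent to the following wrapper (a sketch of the console_scripts mechanism, not code from this change):

    import sys

    from pungi.scripts.cache_cleanup import main

    if __name__ == "__main__":
        sys.exit(main())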
@@ -108,6 +108,7 @@
     <groupid>core</groupid>
   </grouplist>
   <optionlist>
+    <groupid arch="x86_64">standard</groupid>
   </optionlist>
 </environment>
 
@@ -21,6 +21,15 @@ from pungi import paths, checks
 from pungi.module_util import Modulemd
 
 
+GIT_WITH_CREDS = [
+    "git",
+    "-c",
+    "credential.useHttpPath=true",
+    "-c",
+    "credential.helper=!ch",
+]
+
+
 class BaseTestCase(unittest.TestCase):
     def assertFilesEqual(self, fn1, fn2):
         with open(fn1, "rb") as f1:
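Note: GIT_WITH_CREDS is the command prefix the tests expect when a credential helper is configured: each `-c` option injects one-off git configuration, and the `!` in `credential.helper=!ch` tells git to run the named command as the helper ("ch" is just the name these tests use). A sketch of how such a prefix gets spliced into a full command:

    GIT_WITH_CREDS = [
        "git",
        "-c",
        "credential.useHttpPath=true",
        "-c",
        "credential.helper=!ch",
    ]

    # Hypothetical usage: the prefix plus an ordinary subcommand.
    cmd = GIT_WITH_CREDS + ["clone", "https://example.com/repo.git"]
    print(" ".join(cmd))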
@@ -158,6 +167,20 @@ class IterableMock(mock.Mock):
         return iter([])
 
 
+class FSKojiDownloader(object):
+    """Mock for KojiDownloadProxy that checks provided path."""
+
+    def get_file(self, path, validator=None):
+        return path if os.path.isfile(path) else None
+
+
+class DummyKojiDownloader(object):
+    """Mock for KojiDownloadProxy that always finds the file in original location."""
+
+    def get_file(self, path, validator=None):
+        return path
+
+
 class DummyCompose(object):
     def __init__(self, topdir, config):
         self.supported = True
@@ -232,6 +255,8 @@ class DummyCompose(object):
         self.cache_region = None
         self.containers_metadata = {}
         self.load_old_compose_config = mock.Mock(return_value=None)
+        self.koji_downloader = DummyKojiDownloader()
+        self.koji_downloader.path_prefix = "/prefix"
 
     def setup_optional(self):
         self.all_variants["Server-optional"] = MockVariant(
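Note: The new koji_downloader attribute stands in for the KojiDownloadProxy that the `koji_cache` option enables in production code. That class is not shown in this diff; a rough sketch of the contract the mocks above imitate (an assumption, details may differ):

    import os

    class KojiDownloadProxySketch:
        """Assumed contract: map a remote file path into a local cache."""

        def __init__(self, path_prefix):
            self.path_prefix = path_prefix  # e.g. "/prefix" in DummyCompose

        def get_file(self, path, validator=None):
            local = os.path.join(self.path_prefix, path.lstrip("/"))
            # A real implementation would download and validate here.
            return local if os.path.isfile(local) else None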
@ -272,7 +297,7 @@ class DummyCompose(object):
|
|||||||
return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=self.topdir)
|
return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=self.topdir)
|
||||||
|
|
||||||
|
|
||||||
def touch(path, content=None):
|
def touch(path, content=None, mode=None):
|
||||||
"""Helper utility that creates an dummy file in given location. Directories
|
"""Helper utility that creates an dummy file in given location. Directories
|
||||||
will be created."""
|
will be created."""
|
||||||
content = content or (path + "\n")
|
content = content or (path + "\n")
|
||||||
@@ -284,6 +309,8 @@ def touch(path, content=None):
         content = content.encode()
     with open(path, "wb") as f:
         f.write(content)
+    if mode:
+        os.chmod(path, mode)
     return path
 
 
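Note: With the new mode argument a test can create a file with specific permissions in one call, which the createiso permission tests further down rely on. Typical usage (sketch):

    # Create an executable dummy script; 0o755 exercises executable paths.
    path = touch("/tmp/demo.sh", content="#!/bin/sh\n", mode=0o755)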
@@ -473,70 +473,6 @@ class TestBuildinstallPhase(PungiTestCase):
             ],
         )
 
-    @mock.patch("pungi.phases.buildinstall.ThreadPool")
-    @mock.patch("pungi.phases.buildinstall.LoraxWrapper")
-    @mock.patch("pungi.phases.buildinstall.get_volid")
-    def test_starts_threads_for_each_cmd_with_buildinstall(
-        self, get_volid, loraxCls, poolCls
-    ):
-        compose = BuildInstallCompose(
-            self.topdir,
-            {
-                "bootable": True,
-                "release_name": "Test",
-                "release_short": "t",
-                "release_version": "1",
-                "buildinstall_method": "buildinstall",
-                "disc_types": {"dvd": "DVD"},
-            },
-        )
-
-        get_volid.return_value = "vol_id"
-
-        phase = BuildinstallPhase(compose, self._make_pkgset_phase(["p1"]))
-
-        phase.run()
-
-        # Two items added for processing in total.
-        pool = poolCls.return_value
-        self.assertEqual(2, len(pool.queue_put.mock_calls))
-
-        # Obtained correct lorax commands.
-        six.assertCountEqual(
-            self,
-            loraxCls.return_value.get_buildinstall_cmd.mock_calls,
-            [
-                mock.call(
-                    "Test",
-                    "1",
-                    "1",
-                    [self.topdir + "/work/x86_64/repo/p1"],
-                    self.topdir + "/work/x86_64/buildinstall",
-                    buildarch="x86_64",
-                    is_final=True,
-                    volid="vol_id",
-                ),
-                mock.call(
-                    "Test",
-                    "1",
-                    "1",
-                    [self.topdir + "/work/amd64/repo/p1"],
-                    self.topdir + "/work/amd64/buildinstall",
-                    buildarch="amd64",
-                    is_final=True,
-                    volid="vol_id",
-                ),
-            ],
-        )
-        six.assertCountEqual(
-            self,
-            get_volid.mock_calls,
-            [
-                mock.call(compose, "x86_64", disc_type="DVD"),
-                mock.call(compose, "amd64", disc_type="DVD"),
-            ],
-        )
-
     @mock.patch("pungi.phases.buildinstall.get_file")
     @mock.patch("pungi.phases.buildinstall.ThreadPool")
     @mock.patch("pungi.phases.buildinstall.LoraxWrapper")
@@ -1209,6 +1145,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
             },
         )
@@ -1308,6 +1245,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "lorax_use_koji_plugin": True,
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
             },
         )
@@ -1398,140 +1336,6 @@ class BuildinstallThreadTestCase(PungiTestCase):
             ],
         )
 
-    @mock.patch("pungi.phases.buildinstall.link_boot_iso")
-    @mock.patch("pungi.phases.buildinstall.tweak_buildinstall")
-    @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
-    @mock.patch("pungi.wrappers.kojiwrapper.get_buildroot_rpms")
-    @mock.patch("pungi.phases.buildinstall.run")
-    def test_buildinstall_thread_with_buildinstall_in_runroot(
-        self, run, get_buildroot_rpms, KojiWrapperMock, mock_tweak, mock_link
-    ):
-        compose = BuildInstallCompose(
-            self.topdir,
-            {
-                "buildinstall_method": "buildinstall",
-                "runroot_tag": "rrt",
-                "koji_profile": "koji",
-            },
-        )
-
-        get_buildroot_rpms.return_value = ["bash", "zsh"]
-
-        get_runroot_cmd = KojiWrapperMock.return_value.get_runroot_cmd
-
-        run_runroot_cmd = KojiWrapperMock.return_value.run_runroot_cmd
-        run_runroot_cmd.return_value = {
-            "output": "Foo bar baz",
-            "retcode": 0,
-            "task_id": 1234,
-        }
-
-        t = BuildinstallThread(self.pool)
-
-        with mock.patch("time.sleep"):
-            pkgset_phase = self._make_pkgset_phase(["p1"])
-            t.process((compose, "amd64", None, self.cmd, pkgset_phase), 0)
-
-        destdir = os.path.join(self.topdir, "work/amd64/buildinstall")
-        self.assertEqual(
-            get_runroot_cmd.mock_calls,
-            [
-                mock.call(
-                    "rrt",
-                    "amd64",
-                    self.cmd,
-                    channel=None,
-                    use_shell=True,
-                    packages=["anaconda"],
-                    mounts=[self.topdir],
-                    weight=None,
-                    chown_paths=[destdir],
-                )
-            ],
-        )
-        self.assertEqual(
-            run_runroot_cmd.mock_calls,
-            [
-                mock.call(
-                    get_runroot_cmd.return_value,
-                    log_file=self.topdir + "/logs/amd64/buildinstall.amd64.log",
-                )
-            ],
-        )
-        with open(self.topdir + "/logs/amd64/buildinstall-RPMs.amd64.log") as f:
-            rpms = f.read().strip().split("\n")
-        six.assertCountEqual(self, rpms, ["bash", "zsh"])
-        six.assertCountEqual(self, self.pool.finished_tasks, [(None, "amd64")])
-        six.assertCountEqual(
-            self,
-            mock_tweak.call_args_list,
-            [
-                mock.call(
-                    compose,
-                    destdir,
-                    os.path.join(self.topdir, "compose", var, "amd64/os"),
-                    "amd64",
-                    var,
-                    "",
-                    "dummy-volid",
-                    self.pool.kickstart_file,
-                )
-                for var in ["Client", "Server"]
-            ],
-        )
-        six.assertCountEqual(
-            self,
-            mock_link.call_args_list,
-            [
-                mock.call(compose, "amd64", compose.variants["Client"], False),
-                mock.call(compose, "amd64", compose.variants["Server"], False),
-            ],
-        )
-
-    @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
-    @mock.patch("pungi.wrappers.kojiwrapper.get_buildroot_rpms")
-    @mock.patch("pungi.phases.buildinstall.run")
-    def test_buildinstall_fail_exit_code(
-        self, run, get_buildroot_rpms, KojiWrapperMock
-    ):
-        compose = BuildInstallCompose(
-            self.topdir,
-            {
-                "buildinstall_method": "buildinstall",
-                "runroot_tag": "rrt",
-                "koji_profile": "koji",
-                "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
-            },
-        )
-
-        get_buildroot_rpms.return_value = ["bash", "zsh"]
-
-        run_runroot_cmd = KojiWrapperMock.return_value.run_runroot_cmd
-        run_runroot_cmd.return_value = {
-            "output": "Foo bar baz",
-            "retcode": 1,
-            "task_id": 1234,
-        }
-
-        t = BuildinstallThread(self.pool)
-
-        with mock.patch("time.sleep"):
-            pkgset_phase = self._make_pkgset_phase(["p1"])
-            t.process((compose, "x86_64", None, self.cmd, pkgset_phase), 0)
-
-        compose._logger.error.assert_has_calls(
-            [
-                mock.call(
-                    "[FAIL] Buildinstall (variant None, arch x86_64) failed, but going on anyway."  # noqa: E501
-                ),
-                mock.call(
-                    "Runroot task failed: 1234. See %s/logs/x86_64/buildinstall.x86_64.log for more details."  # noqa: E501
-                    % self.topdir
-                ),
-            ]
-        )
-        self.assertEqual(self.pool.finished_tasks, set())
-
     @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
     @mock.patch("pungi.wrappers.kojiwrapper.get_buildroot_rpms")
     @mock.patch("pungi.phases.buildinstall.run")
@@ -1542,6 +1346,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )
@@ -1591,6 +1396,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )
@@ -1663,6 +1469,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )
@@ -1701,6 +1508,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
                 "buildinstall_topdir": "/buildinstall_topdir",
             },
@@ -1810,6 +1618,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
 
@@ -628,6 +628,7 @@ class ComposeTestCase(unittest.TestCase):
         ci_copy = dict(self.ci_json)
         ci_copy["header"]["version"] = "1.2"
         mocked_response = mock.MagicMock()
+        mocked_response.status_code = 200
         mocked_response.text = json.dumps(self.ci_json)
         mocked_requests.post.return_value = mocked_response
 
@@ -655,6 +656,7 @@ class ComposeTestCase(unittest.TestCase):
         mocked_requests.post.assert_called_once_with(
             "https://cts.localhost.tld/api/1/composes/",
             auth=mock.ANY,
+            data=None,
             json=expected_json,
         )
 
@@ -793,12 +795,16 @@ class TracebackTest(unittest.TestCase):
         shutil.rmtree(self.tmp_dir)
         self.patcher.stop()
 
-    def assertTraceback(self, filename):
+    def assertTraceback(self, filename, show_locals=True):
         self.assertTrue(
             os.path.isfile("%s/logs/global/%s.global.log" % (self.tmp_dir, filename))
         )
         self.assertEqual(
-            self.Traceback.mock_calls, [mock.call(), mock.call().get_traceback()]
+            self.Traceback.mock_calls,
+            [
+                mock.call(show_locals=show_locals),
+                mock.call(show_locals=show_locals).get_traceback(),
+            ],
         )
 
     def test_traceback_default(self):
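Note: The tests now expect the Traceback object to be constructed with show_locals, i.e. the production logging code forwards the flag so local variable values can be kept out of crash dumps. Roughly (a sketch; assumes kobo.tback.Traceback accepts show_locals, as the mocked calls imply):

    from kobo.tback import Traceback

    def dump_traceback(show_locals=True):
        # When show_locals is False, variable values stay out of the log.
        return Traceback(show_locals=show_locals).get_traceback()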
@@ -811,6 +817,7 @@ class TracebackTest(unittest.TestCase):
 
 
 class RetryRequestTest(unittest.TestCase):
+    @mock.patch("time.sleep", new=lambda x: x)
     @mock.patch("pungi.compose.requests")
     def test_retry_timeout(self, mocked_requests):
         mocked_requests.post.side_effect = [
@@ -822,8 +829,22 @@ class RetryRequestTest(unittest.TestCase):
         self.assertEqual(
             mocked_requests.mock_calls,
             [
-                mock.call.post(url, json=None, auth=None),
-                mock.call.post(url, json=None, auth=None),
+                mock.call.post(url, data=None, json=None, auth=None),
+                mock.call.post(url, data=None, json=None, auth=None),
             ],
         )
         self.assertEqual(rv.status_code, 200)
+
+    @mock.patch("pungi.compose.requests")
+    def test_no_retry_on_client_error(self, mocked_requests):
+        mocked_requests.post.side_effect = [
+            mock.Mock(status_code=400, json=lambda: {"message": "You made a mistake"}),
+        ]
+        url = "http://locahost/api/1/composes/"
+        with self.assertRaises(RuntimeError):
+            retry_request("post", url)
+
+        self.assertEqual(
+            mocked_requests.mock_calls,
+            [mock.call.post(url, data=None, json=None, auth=None)],
+        )
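Note: The added test pins down the retry policy: a 4xx response raises immediately with the server-supplied message, while the existing timeout test keeps retrying transient failures. A sketch of a retry_request consistent with both tests (illustrative, not the actual implementation):

    import time

    import requests

    def retry_request(method, url, data=None, json=None, auth=None):
        while True:
            rv = getattr(requests, method)(url, data=data, json=json, auth=auth)
            if rv.status_code < 400:
                return rv
            if rv.status_code < 500:
                # Client error: retrying cannot help, surface the message.
                raise RuntimeError(rv.json()["message"])
            time.sleep(10)  # transient server error, try again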
@@ -223,22 +223,6 @@ class BuildinstallConfigTestCase(ConfigTestCase):
 
         self.assertValidation(cfg, [])
 
-    def test_buildinstall_with_lorax_options(self):
-        cfg = load_config(
-            PKGSET_REPOS,
-            buildinstall_method="buildinstall",
-            lorax_options=[("^Server$", {})],
-        )
-
-        self.assertValidation(
-            cfg,
-            [
-                checks.CONFLICTS.format(
-                    "buildinstall_method", "buildinstall", "lorax_options"
-                )
-            ],
-        )
-
     def test_lorax_with_lorax_options(self):
         cfg = load_config(PKGSET_REPOS, buildinstall_method="lorax", lorax_options=[])
 
@@ -440,7 +424,7 @@ class LiveMediaConfigTestCase(ConfigTestCase):
             live_media_version="Rawhide",
         )
 
-        resolve_git_url.side_effect = lambda x: x.replace("HEAD", "CAFE")
+        resolve_git_url.side_effect = lambda x, _helper: x.replace("HEAD", "CAFE")
 
         self.assertValidation(cfg)
         self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")
@@ -552,6 +552,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         cmd = {
@@ -633,6 +634,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "create_jigdo": False,
                 "runroot_weights": {"createiso": 123},
             },
@@ -717,6 +719,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         cmd = {
@@ -807,6 +810,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         cmd = {
@@ -839,6 +843,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.*$", {"*": "iso"})],
             },
         )
@@ -881,6 +886,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.*$", {"*": "iso"})],
             },
         )
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 
 import mock
+from parameterized import parameterized
 
 import os
 from six.moves import StringIO
@@ -266,58 +267,6 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
             ]
         )
 
-    def test_bootable_run_buildinstall(self):
-        createiso.write_script(
-            createiso.CreateIsoOpts(
-                output_dir=self.outdir,
-                iso_name="DP-1.0-20160405.t.3-ppc64.iso",
-                volid="DP-1.0-20160405.t.3",
-                graft_points="graft-list",
-                arch="ppc64",
-                buildinstall_method="buildinstall",
-            ),
-            self.out,
-        )
-
-        self.assertScript(
-            [
-                " ".join(
-                    [
-                        "/usr/bin/genisoimage",
-                        "-untranslated-filenames",
-                        "-volid",
-                        "DP-1.0-20160405.t.3",
-                        "-J",
-                        "-joliet-long",
-                        "-rational-rock",
-                        "-translation-table",
-                        "-x",
-                        "./lost+found",
-                        "-part",
-                        "-hfs",
-                        "-r",
-                        "-l",
-                        "-sysid",
-                        "PPC",
-                        "-no-desktop",
-                        "-allow-multidot",
-                        "-chrp-boot",
-                        "-map",
-                        "/usr/lib/anaconda-runtime/boot/mapping",
-                        "-hfs-bless",
-                        "/ppc/mac",
-                        "-o",
-                        "DP-1.0-20160405.t.3-ppc64.iso",
-                        "-graft-points",
-                        "-path-list",
-                        "graft-list",
-                    ]
-                ),
-                " ".join(["/usr/bin/implantisomd5", "DP-1.0-20160405.t.3-ppc64.iso"]),
-                "isoinfo -R -f -i DP-1.0-20160405.t.3-ppc64.iso | grep -v '/TRANS.TBL$' | sort >> DP-1.0-20160405.t.3-ppc64.iso.manifest",  # noqa: E501
-            ]
-        )
-
     @mock.patch("sys.stderr")
     @mock.patch("kobo.shortcuts.run")
     def test_run_with_jigdo_bad_args(self, run, stderr):
@@ -391,3 +340,27 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
             ),
         ]
     )
+
+    @parameterized.expand(
+        [("644", 0o644), ("664", 0o664), ("666", 0o666), ("2644", 0o2644)]
+    )
+    def test_get_perms_non_executable(self, test_name, mode):
+        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
+        self.assertEqual(createiso._get_perms(path), 0o444)
+
+    @parameterized.expand(
+        [
+            ("544", 0o544),
+            ("554", 0o554),
+            ("555", 0o555),
+            ("744", 0o744),
+            ("755", 0o755),
+            ("774", 0o774),
+            ("775", 0o775),
+            ("777", 0o777),
+            ("2775", 0o2775),
+        ]
+    )
+    def test_get_perms_executable(self, test_name, mode):
+        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
+        self.assertEqual(createiso._get_perms(path), 0o555)
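Note: Together the two tables fix the behaviour of createiso._get_perms: every file is normalised to world-readable, and any execute bit (even alongside a setgid bit, as in "2775") upgrades the result to 0o555. A sketch that satisfies both tables (assumed; the real helper may be written differently):

    import os
    import stat

    def _get_perms_sketch(path):
        mode = os.stat(path).st_mode
        executable = mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        return 0o555 if executable else 0o444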
@@ -122,6 +122,7 @@ class ImageContainerThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "translate_paths": [(self.topdir, "http://root")],
             },
         )
@@ -35,6 +35,7 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Client|Server$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
 
@ -45,7 +46,7 @@ class TestImageBuildPhase(PungiTestCase):
|
|||||||
phase.run()
|
phase.run()
|
||||||
|
|
||||||
# assert at least one thread was started
|
# assert at least one thread was started
|
||||||
self.assertTrue(phase.pool.add.called)
|
phase.pool.add.assert_called()
|
||||||
client_args = {
|
client_args = {
|
||||||
"original_image_conf": original_image_conf,
|
"original_image_conf": original_image_conf,
|
||||||
"image_conf": {
|
"image_conf": {
|
||||||
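Note: Switching from self.assertTrue(mock.called) to the Mock assert helpers is more than style. Attribute access on a Mock never fails, so a misspelled check such as `called_once` (fixed in later hunks) is always truthy and can never catch a missing call. A minimal demonstration:

    from unittest import mock

    m = mock.Mock()

    # Old pattern: "called_once" is not a real attribute; Mock fabricates a
    # child mock, which is truthy even though m was never called.
    assert m.called_once

    # New pattern: assert_called_once() really verifies the call count.
    try:
        m.assert_called_once()
    except AssertionError:
        print("caught: m was never called")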
@@ -127,6 +128,7 @@ class TestImageBuildPhase(PungiTestCase):
                 "image_build_version": "Rawhide",
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
 
@@ -137,7 +139,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()
 
         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -188,6 +190,7 @@ class TestImageBuildPhase(PungiTestCase):
                 "image_build_target": "f24",
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
 
@@ -196,7 +199,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()
 
         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -251,6 +254,7 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
 
@@ -261,8 +265,8 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()
 
         # assert at least one thread was started
-        self.assertFalse(phase.pool.add.called)
-        self.assertFalse(phase.pool.queue_put.called)
+        phase.pool.add.assert_not_called()
+        phase.pool.queue_put.assert_not_called()
 
     @mock.patch("pungi.phases.image_build.ThreadPool")
     def test_image_build_set_install_tree(self, ThreadPool):
@ -286,6 +290,7 @@ class TestImageBuildPhase(PungiTestCase):
|
|||||||
{
|
{
|
||||||
"image_build": {"^Server$": [original_image_conf]},
|
"image_build": {"^Server$": [original_image_conf]},
|
||||||
"koji_profile": "koji",
|
"koji_profile": "koji",
|
||||||
|
"koji_cache": "/tmp",
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
compose.setup_optional()
|
compose.setup_optional()
|
||||||
@ -297,9 +302,9 @@ class TestImageBuildPhase(PungiTestCase):
|
|||||||
phase.run()
|
phase.run()
|
||||||
|
|
||||||
# assert at least one thread was started
|
# assert at least one thread was started
|
||||||
self.assertTrue(phase.pool.add.called)
|
phase.pool.add.assert_called()
|
||||||
|
|
||||||
self.assertTrue(phase.pool.queue_put.called_once)
|
phase.pool.queue_put.assert_called_once()
|
||||||
args, kwargs = phase.pool.queue_put.call_args
|
args, kwargs = phase.pool.queue_put.call_args
|
||||||
self.assertEqual(args[0][0], compose)
|
self.assertEqual(args[0][0], compose)
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
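Two distinct fixes are bundled in the assertion changes above. `self.assertTrue(phase.pool.add.called)` versus `phase.pool.add.assert_called()` is merely a move to the idiomatic API. But `self.assertTrue(phase.pool.queue_put.called_once)` was a latent bug: `called_once` is not a real `Mock` attribute, and attribute access on a mock silently creates a truthy child mock, so that assertion could never fail. `assert_called_once()` is the actual mock API and does fail. A minimal, self-contained demonstration:

```python
import mock  # the standalone backport these tests already import

m = mock.Mock()
# Old spelling: "called_once" does not exist, so the lookup creates a
# child Mock, which is truthy -- this "assertion" always passes.
assert bool(m.called_once)   # passes even though m was never called
# New spelling raises unless the mock was called exactly once.
try:
    m.assert_called_once()
except AssertionError:
    print("correctly caught: m was never called")
```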
@@ -353,6 +358,7 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "translate_paths": [("/my", "http://example.com")],
             },
         )
@@ -364,9 +370,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()

-        self.assertTrue(phase.pool.queue_put.called_once)
+        phase.pool.queue_put.assert_called_once()
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(args[0][0], compose)
         self.assertDictEqual(
@@ -419,6 +425,7 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         compose.setup_optional()
@@ -430,9 +437,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()

-        self.assertTrue(phase.pool.queue_put.called_once)
+        phase.pool.queue_put.assert_called_once()
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(args[0][0], compose)
         self.assertDictEqual(
@@ -491,6 +498,7 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -501,9 +509,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()

-        self.assertTrue(phase.pool.queue_put.called_once)
+        phase.pool.queue_put.assert_called_once()
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(args[0][0], compose)
         self.assertDictEqual(
@@ -559,6 +567,7 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -569,9 +578,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()

-        self.assertTrue(phase.pool.queue_put.called_once)
+        phase.pool.queue_put.assert_called_once()
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(
             args[0][1].get("image_conf", {}).get("image-build", {}).get("release"),
@@ -602,6 +611,7 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -612,9 +622,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()

-        self.assertTrue(phase.pool.queue_put.called_once)
+        phase.pool.queue_put.assert_called_once()
         args, kwargs = phase.pool.queue_put.call_args
         self.assertEqual(
             args[0][1].get("image_conf", {}).get("image-build", {}).get("release"),
@@ -645,6 +655,7 @@ class TestImageBuildPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -655,9 +666,9 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()

-        self.assertTrue(phase.pool.queue_put.called_once)
+        phase.pool.queue_put.assert_called_once()
         args, kwargs = phase.pool.queue_put.call_args
         self.assertTrue(args[0][1].get("scratch"))

@@ -681,6 +692,7 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server-optional$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         compose.setup_optional()
@@ -692,7 +704,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -744,6 +756,7 @@ class TestImageBuildPhase(PungiTestCase):
             {
                 "image_build": {"^Server$": [original_image_conf]},
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         compose.setup_optional()
@@ -755,7 +768,7 @@ class TestImageBuildPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         server_args = {
             "original_image_conf": original_image_conf,
             "image_conf": {
@@ -943,7 +956,9 @@ class TestCreateImageBuildThread(PungiTestCase):
     @mock.patch("pungi.phases.image_build.KojiWrapper")
     @mock.patch("pungi.phases.image_build.Linker")
     def test_process_handle_fail(self, Linker, KojiWrapper):
-        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
+        compose = DummyCompose(
+            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
+        )
         pool = mock.Mock()
         cmd = {
             "image_conf": {
@@ -1000,7 +1015,9 @@ class TestCreateImageBuildThread(PungiTestCase):
     @mock.patch("pungi.phases.image_build.KojiWrapper")
     @mock.patch("pungi.phases.image_build.Linker")
     def test_process_handle_exception(self, Linker, KojiWrapper):
-        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
+        compose = DummyCompose(
+            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
+        )
         pool = mock.Mock()
         cmd = {
             "image_conf": {
@@ -1046,7 +1063,9 @@ class TestCreateImageBuildThread(PungiTestCase):
     @mock.patch("pungi.phases.image_build.KojiWrapper")
     @mock.patch("pungi.phases.image_build.Linker")
     def test_process_handle_fail_only_one_optional(self, Linker, KojiWrapper):
-        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
+        compose = DummyCompose(
+            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
+        )
         pool = mock.Mock()
         cmd = {
             "image_conf": {
@@ -28,6 +28,7 @@ def fake_listdir(pattern, result=None, exc=None):
     """Create a function that mocks os.listdir. If the path contains pattern,
     result will be returned or exc raised. Otherwise it's normal os.listdir
     """
+
     # The point of this is to avoid issues on Python 2, where apparently
     # isdir() is using listdir(), so the mocking is breaking it.
     def worker(path):
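The docstring in the hunk above fully states `fake_listdir`'s contract, although its body lies outside the hunk (only `def worker(path):` is visible). Under that contract, a helper like it plausibly looks as follows; this is a sketch for orientation, not the verbatim source:

```python
import os

def fake_listdir(pattern, result=None, exc=None):
    """Mock os.listdir. If the path contains pattern, result is returned
    or exc raised; otherwise fall through to the real os.listdir."""
    real_listdir = os.listdir  # bind the real function before any patching

    # Falling through to the real listdir avoids issues on Python 2,
    # where isdir() itself relies on listdir().
    def worker(path):
        if pattern in path:
            if exc:
                raise exc
            return result
        return real_listdir(path)

    return worker
```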
@@ -121,7 +121,6 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
         )

     def test_get_image_paths(self):
-
         # The data for this tests is obtained from the actual Koji build. It
         # includes lots of fields that are not used, but for the sake of
         # completeness is fully preserved.
@@ -321,7 +320,6 @@ class KojiWrapperTest(KojiWrapperBaseTestCase):
         )

     def test_get_image_paths_failed_subtask(self):
-
         failed = set()

         def failed_callback(arch):
@@ -43,7 +43,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -124,7 +124,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -192,7 +192,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -265,7 +265,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -363,7 +363,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -433,7 +433,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -503,7 +503,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -571,7 +571,7 @@ class TestLiveImagesPhase(PungiTestCase):
         phase.run()

         # assert at least one thread was started
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.maxDiff = None
         six.assertCountEqual(
             self,
@@ -958,7 +958,9 @@ class TestCreateLiveImageThread(PungiTestCase):
     @mock.patch("pungi.phases.live_images.run")
     @mock.patch("pungi.phases.live_images.KojiWrapper")
     def test_process_handles_fail(self, KojiWrapper, run, copy2):
-        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
+        compose = DummyCompose(
+            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
+        )
         pool = mock.Mock()
         cmd = {
             "ks_file": "/path/to/ks_file",
@@ -1011,7 +1013,9 @@ class TestCreateLiveImageThread(PungiTestCase):
     @mock.patch("pungi.phases.live_images.run")
     @mock.patch("pungi.phases.live_images.KojiWrapper")
     def test_process_handles_exception(self, KojiWrapper, run, copy2):
-        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
+        compose = DummyCompose(
+            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
+        )
         pool = mock.Mock()
         cmd = {
             "ks_file": "/path/to/ks_file",
@@ -28,6 +28,7 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -36,7 +37,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -85,6 +86,7 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -93,7 +95,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -148,6 +150,7 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -156,7 +159,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -259,6 +262,7 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -267,7 +271,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()
         self.assertEqual(
             phase.pool.queue_put.call_args_list,
             [
@@ -364,6 +368,7 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -394,6 +399,7 @@ class TestLiveMediaPhase(PungiTestCase):
                     ]
                 },
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -444,7 +450,7 @@ class TestLiveMediaPhase(PungiTestCase):
         phase = LiveMediaPhase(compose)

         phase.run()
-        self.assertTrue(phase.pool.add.called)
+        phase.pool.add.assert_called()

         self.assertEqual(
             phase.pool.queue_put.call_args_list,
@@ -611,7 +617,9 @@ class TestLiveMediaThread(PungiTestCase):
     @mock.patch("pungi.phases.livemedia_phase.get_file_size")
     @mock.patch("pungi.phases.livemedia_phase.KojiWrapper")
     def test_handle_koji_fail(self, KojiWrapper, get_file_size, get_mtime):
-        compose = DummyCompose(self.topdir, {"koji_profile": "koji"})
+        compose = DummyCompose(
+            self.topdir, {"koji_profile": "koji", "koji_cache": "/tmp"}
+        )
         config = {
             "arches": ["amd64", "x86_64"],
             "ksfile": "file.ks",
@@ -688,6 +696,7 @@ class TestLiveMediaThread(PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["live-media"]})],
             },
         )
@@ -757,6 +766,7 @@ class TestLiveMediaThread(PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["live-media"]})],
             },
         )
@@ -133,7 +133,7 @@ class TestNotifier(unittest.TestCase):
     def test_does_not_run_without_config(self, run, makedirs):
         n = PungiNotifier(None)
         n.send("cmd", foo="bar", baz="quux")
-        self.assertFalse(run.called)
+        run.assert_not_called()

     @mock.patch("pungi.util.translate_path")
     @mock.patch("kobo.shortcuts.run")
@@ -146,4 +146,4 @@ class TestNotifier(unittest.TestCase):
         n.send("cmd", **self.data)

         self.assertEqual(run.call_args_list, [self._call("run-notify", "cmd")])
-        self.assertTrue(self.compose.log_warning.called)
+        self.compose.log_warning.assert_called()
@ -1,934 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
import itertools
|
|
||||||
import json
|
|
||||||
from functools import wraps
|
|
||||||
import operator
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
from textwrap import dedent
|
|
||||||
|
|
||||||
import mock
|
|
||||||
import six
|
|
||||||
from six.moves import configparser
|
|
||||||
|
|
||||||
from parameterized import parameterized
|
|
||||||
|
|
||||||
from tests.helpers import BaseTestCase, PungiTestCase, touch, FIXTURE_DIR
|
|
||||||
from pungi_utils import orchestrator as o
|
|
||||||
|
|
||||||
|
|
||||||
class TestConfigSubstitute(PungiTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
super(TestConfigSubstitute, self).setUp()
|
|
||||||
self.fp = os.path.join(self.topdir, "config.conf")
|
|
||||||
|
|
||||||
@parameterized.expand(
|
|
||||||
[
|
|
||||||
("hello = 'world'", "hello = 'world'"),
|
|
||||||
("hello = '{{foo}}'", "hello = 'bar'"),
|
|
||||||
("hello = '{{ foo}}'", "hello = 'bar'"),
|
|
||||||
("hello = '{{foo }}'", "hello = 'bar'"),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
def test_substitutions(self, initial, expected):
|
|
||||||
touch(self.fp, initial)
|
|
||||||
o.fill_in_config_file(self.fp, {"foo": "bar"})
|
|
||||||
with open(self.fp) as f:
|
|
||||||
self.assertEqual(expected, f.read())
|
|
||||||
|
|
||||||
def test_missing_key(self):
|
|
||||||
touch(self.fp, "hello = '{{unknown}}'")
|
|
||||||
with self.assertRaises(RuntimeError) as ctx:
|
|
||||||
o.fill_in_config_file(self.fp, {})
|
|
||||||
self.assertEqual(
|
|
||||||
"Unknown placeholder 'unknown' in config.conf", str(ctx.exception)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestSafeGetList(BaseTestCase):
|
|
||||||
@parameterized.expand(
|
|
||||||
[
|
|
||||||
("", []),
|
|
||||||
("foo", ["foo"]),
|
|
||||||
("foo,bar", ["foo", "bar"]),
|
|
||||||
("foo bar", ["foo", "bar"]),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
def test_success(self, value, expected):
|
|
||||||
cf = configparser.RawConfigParser()
|
|
||||||
cf.add_section("general")
|
|
||||||
cf.set("general", "key", value)
|
|
||||||
self.assertEqual(o._safe_get_list(cf, "general", "key"), expected)
|
|
||||||
|
|
||||||
def test_default(self):
|
|
||||||
cf = configparser.RawConfigParser()
|
|
||||||
cf.add_section("general")
|
|
||||||
self.assertEqual(o._safe_get_list(cf, "general", "missing", "hello"), "hello")
|
|
||||||
|
|
||||||
|
|
||||||
class TestComposePart(PungiTestCase):
|
|
||||||
def test_from_minimal_config(self):
|
|
||||||
cf = configparser.RawConfigParser()
|
|
||||||
cf.add_section("test")
|
|
||||||
cf.set("test", "config", "my.conf")
|
|
||||||
|
|
||||||
part = o.ComposePart.from_config(cf, "test", "/tmp/config")
|
|
||||||
deps = "set()" if six.PY3 else "set([])"
|
|
||||||
self.assertEqual(str(part), "test")
|
|
||||||
self.assertEqual(
|
|
||||||
repr(part),
|
|
||||||
"ComposePart('test', '/tmp/config/my.conf', 'READY', "
|
|
||||||
"just_phase=[], skip_phase=[], dependencies=%s)" % deps,
|
|
||||||
)
|
|
||||||
self.assertFalse(part.failable)
|
|
||||||
|
|
||||||
def test_from_full_config(self):
|
|
||||||
cf = configparser.RawConfigParser()
|
|
||||||
cf.add_section("test")
|
|
||||||
cf.set("test", "config", "my.conf")
|
|
||||||
cf.set("test", "depends_on", "base")
|
|
||||||
cf.set("test", "skip_phase", "skip")
|
|
||||||
cf.set("test", "just_phase", "just")
|
|
||||||
cf.set("test", "failable", "yes")
|
|
||||||
|
|
||||||
part = o.ComposePart.from_config(cf, "test", "/tmp/config")
|
|
||||||
deps = "{'base'}" if six.PY3 else "set(['base'])"
|
|
||||||
self.assertEqual(
|
|
||||||
repr(part),
|
|
||||||
"ComposePart('test', '/tmp/config/my.conf', 'WAITING', "
|
|
||||||
"just_phase=['just'], skip_phase=['skip'], dependencies=%s)" % deps,
|
|
||||||
)
|
|
||||||
self.assertTrue(part.failable)
|
|
||||||
|
|
||||||
def test_get_cmd(self):
|
|
||||||
conf = o.Config(
|
|
||||||
"/tgt/", "production", "RC-1.0", "/old", "/cfg", 1234, ["--quiet"]
|
|
||||||
)
|
|
||||||
part = o.ComposePart(
|
|
||||||
"test", "/tmp/my.conf", just_phase=["just"], skip_phase=["skip"]
|
|
||||||
)
|
|
||||||
part.path = "/compose"
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
part.get_cmd(conf),
|
|
||||||
[
|
|
||||||
"pungi-koji",
|
|
||||||
"--config",
|
|
||||||
"/tmp/my.conf",
|
|
||||||
"--compose-dir",
|
|
||||||
"/compose",
|
|
||||||
"--production",
|
|
||||||
"--label",
|
|
||||||
"RC-1.0",
|
|
||||||
"--just-phase",
|
|
||||||
"just",
|
|
||||||
"--skip-phase",
|
|
||||||
"skip",
|
|
||||||
"--old-compose",
|
|
||||||
"/old/parts",
|
|
||||||
"--koji-event",
|
|
||||||
"1234",
|
|
||||||
"--quiet",
|
|
||||||
"--no-latest-link",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_refresh_status(self):
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf")
|
|
||||||
part.path = os.path.join(self.topdir)
|
|
||||||
touch(os.path.join(self.topdir, "STATUS"), "FINISHED")
|
|
||||||
part.refresh_status()
|
|
||||||
self.assertEqual(part.status, "FINISHED")
|
|
||||||
|
|
||||||
def test_refresh_status_missing_file(self):
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf")
|
|
||||||
part.path = os.path.join(self.topdir)
|
|
||||||
part.refresh_status()
|
|
||||||
self.assertEqual(part.status, "DOOMED")
|
|
||||||
|
|
||||||
@parameterized.expand(["FINISHED", "FINISHED_INCOMPLETE"])
|
|
||||||
def test_is_finished(self, status):
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf")
|
|
||||||
part.status = status
|
|
||||||
self.assertTrue(part.is_finished())
|
|
||||||
|
|
||||||
@parameterized.expand(["STARTED", "WAITING"])
|
|
||||||
def test_is_not_finished(self, status):
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf")
|
|
||||||
part.status = status
|
|
||||||
self.assertFalse(part.is_finished())
|
|
||||||
|
|
||||||
@mock.patch("pungi_utils.orchestrator.fill_in_config_file")
|
|
||||||
@mock.patch("pungi_utils.orchestrator.get_compose_dir")
|
|
||||||
@mock.patch("kobo.conf.PyConfigParser")
|
|
||||||
def test_setup_start(self, Conf, gcd, ficf):
|
|
||||||
def pth(*path):
|
|
||||||
return os.path.join(self.topdir, *path)
|
|
||||||
|
|
||||||
conf = o.Config(
|
|
||||||
pth("tgt"), "production", "RC-1.0", "/old", pth("cfg"), None, None
|
|
||||||
)
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf")
|
|
||||||
parts = {"base": mock.Mock(path="/base", is_finished=lambda: True)}
|
|
||||||
Conf.return_value.opened_files = ["foo.conf"]
|
|
||||||
|
|
||||||
part.setup_start(conf, parts)
|
|
||||||
|
|
||||||
self.assertEqual(part.status, "STARTED")
|
|
||||||
self.assertEqual(part.path, gcd.return_value)
|
|
||||||
self.assertEqual(part.log_file, pth("tgt", "logs", "test.log"))
|
|
||||||
self.assertEqual(
|
|
||||||
ficf.call_args_list,
|
|
||||||
[mock.call("foo.conf", {"part-base": "/base", "configdir": pth("cfg")})],
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
gcd.call_args_list,
|
|
||||||
[
|
|
||||||
mock.call(
|
|
||||||
pth("tgt/parts"),
|
|
||||||
Conf.return_value,
|
|
||||||
compose_type="production",
|
|
||||||
compose_label="RC-1.0",
|
|
||||||
)
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
@parameterized.expand(
|
|
||||||
[
|
|
||||||
# Nothing blocking, no change
|
|
||||||
([], [], o.Status.READY),
|
|
||||||
# Remove last blocker and switch to READY
|
|
||||||
(["finished"], [], o.Status.READY),
|
|
||||||
# Blocker remaining, stay in WAITING
|
|
||||||
(["finished", "block"], ["block"], o.Status.WAITING),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
def test_unblock_on(self, deps, blockers, status):
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf", dependencies=deps)
|
|
||||||
part.unblock_on("finished")
|
|
||||||
six.assertCountEqual(self, part.blocked_on, blockers)
|
|
||||||
self.assertEqual(part.status, status)
|
|
||||||
|
|
||||||
|
|
||||||
class TestStartPart(PungiTestCase):
|
|
||||||
@mock.patch("subprocess.Popen")
|
|
||||||
def test_start(self, Popen):
|
|
||||||
part = mock.Mock(log_file=os.path.join(self.topdir, "log"))
|
|
||||||
config = mock.Mock()
|
|
||||||
parts = mock.Mock()
|
|
||||||
cmd = ["pungi-koji", "..."]
|
|
||||||
|
|
||||||
part.get_cmd.return_value = cmd
|
|
||||||
|
|
||||||
proc = o.start_part(config, parts, part)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
part.mock_calls,
|
|
||||||
[mock.call.setup_start(config, parts), mock.call.get_cmd(config)],
|
|
||||||
)
|
|
||||||
self.assertEqual(proc, Popen.return_value)
|
|
||||||
self.assertEqual(
|
|
||||||
Popen.call_args_list,
|
|
||||||
[mock.call(cmd, stdout=mock.ANY, stderr=subprocess.STDOUT)],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestHandleFinished(BaseTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.config = mock.Mock()
|
|
||||||
self.linker = mock.Mock()
|
|
||||||
self.parts = {"a": mock.Mock(), "b": mock.Mock()}
|
|
||||||
|
|
||||||
@mock.patch("pungi_utils.orchestrator.update_metadata")
|
|
||||||
@mock.patch("pungi_utils.orchestrator.copy_part")
|
|
||||||
def test_handle_success(self, cp, um):
|
|
||||||
proc = mock.Mock(returncode=0)
|
|
||||||
o.handle_finished(self.config, self.linker, self.parts, proc, self.parts["a"])
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
self.parts["a"].mock_calls,
|
|
||||||
[mock.call.refresh_status(), mock.call.unblock_on(self.parts["a"].name)],
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
self.parts["b"].mock_calls, [mock.call.unblock_on(self.parts["a"].name)]
|
|
||||||
)
|
|
||||||
self.assertEqual(
|
|
||||||
cp.call_args_list, [mock.call(self.config, self.linker, self.parts["a"])]
|
|
||||||
)
|
|
||||||
self.assertEqual(um.call_args_list, [mock.call(self.config, self.parts["a"])])
|
|
||||||
|
|
||||||
@mock.patch("pungi_utils.orchestrator.block_on")
|
|
||||||
def test_handle_failure(self, bo):
|
|
||||||
proc = mock.Mock(returncode=1)
|
|
||||||
o.handle_finished(self.config, self.linker, self.parts, proc, self.parts["a"])
|
|
||||||
|
|
||||||
self.assertEqual(self.parts["a"].mock_calls, [mock.call.refresh_status()])
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
bo.call_args_list, [mock.call(self.parts, self.parts["a"].name)]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestBlockOn(BaseTestCase):
|
|
||||||
def test_single(self):
|
|
||||||
parts = {"b": o.ComposePart("b", "b.conf", dependencies=["a"])}
|
|
||||||
|
|
||||||
o.block_on(parts, "a")
|
|
||||||
|
|
||||||
self.assertEqual(parts["b"].status, o.Status.BLOCKED)
|
|
||||||
|
|
||||||
def test_chain(self):
|
|
||||||
parts = {
|
|
||||||
"b": o.ComposePart("b", "b.conf", dependencies=["a"]),
|
|
||||||
"c": o.ComposePart("c", "c.conf", dependencies=["b"]),
|
|
||||||
"d": o.ComposePart("d", "d.conf", dependencies=["c"]),
|
|
||||||
}
|
|
||||||
|
|
||||||
o.block_on(parts, "a")
|
|
||||||
|
|
||||||
self.assertEqual(parts["b"].status, o.Status.BLOCKED)
|
|
||||||
self.assertEqual(parts["c"].status, o.Status.BLOCKED)
|
|
||||||
self.assertEqual(parts["d"].status, o.Status.BLOCKED)
|
|
||||||
|
|
||||||
|
|
||||||
class TestUpdateMetadata(PungiTestCase):
|
|
||||||
def assertEqualJSON(self, f1, f2):
|
|
||||||
with open(f1) as f:
|
|
||||||
actual = json.load(f)
|
|
||||||
with open(f2) as f:
|
|
||||||
expected = json.load(f)
|
|
||||||
self.assertEqual(actual, expected)
|
|
||||||
|
|
||||||
def assertEqualMetadata(self, expected):
|
|
||||||
expected_dir = os.path.join(FIXTURE_DIR, expected, "compose/metadata")
|
|
||||||
for f in os.listdir(expected_dir):
|
|
||||||
self.assertEqualJSON(
|
|
||||||
os.path.join(self.tgt, "compose/metadata", f),
|
|
||||||
os.path.join(expected_dir, f),
|
|
||||||
)
|
|
||||||
|
|
||||||
@parameterized.expand(["empty-metadata", "basic-metadata"])
|
|
||||||
def test_merge_into_empty(self, fixture):
|
|
||||||
self.tgt = os.path.join(self.topdir, "target")
|
|
||||||
|
|
||||||
conf = o.Config(self.tgt, "production", None, None, None, None, [])
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf")
|
|
||||||
part.path = os.path.join(FIXTURE_DIR, "DP-1.0-20181001.n.0")
|
|
||||||
|
|
||||||
shutil.copytree(os.path.join(FIXTURE_DIR, fixture), self.tgt)
|
|
||||||
|
|
||||||
o.update_metadata(conf, part)
|
|
||||||
|
|
||||||
self.assertEqualMetadata(fixture + "-merged")
|
|
||||||
|
|
||||||
|
|
||||||
class TestCopyPart(PungiTestCase):
|
|
||||||
@mock.patch("pungi_utils.orchestrator.hardlink_dir")
|
|
||||||
def test_copy(self, hd):
|
|
||||||
self.tgt = os.path.join(self.topdir, "target")
|
|
||||||
conf = o.Config(self.tgt, "production", None, None, None, None, [])
|
|
||||||
linker = mock.Mock()
|
|
||||||
part = o.ComposePart("test", "/tmp/my.conf")
|
|
||||||
part.path = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
|
|
||||||
|
|
||||||
o.copy_part(conf, linker, part)
|
|
||||||
|
|
||||||
six.assertCountEqual(
|
|
||||||
self,
|
|
||||||
hd.call_args_list,
|
|
||||||
[
|
|
||||||
mock.call(
|
|
||||||
linker,
|
|
||||||
os.path.join(part.path, "compose", variant),
|
|
||||||
os.path.join(self.tgt, "compose", variant),
|
|
||||||
)
|
|
||||||
for variant in ["Client", "Server"]
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestHardlinkDir(PungiTestCase):
|
|
||||||
def test_hardlinking(self):
|
|
||||||
linker = mock.Mock()
|
|
||||||
src = os.path.join(self.topdir, "src")
|
|
||||||
dst = os.path.join(self.topdir, "dst")
|
|
||||||
files = ["file.txt", "nested/deep/another.txt"]
|
|
||||||
|
|
||||||
for f in files:
|
|
||||||
touch(os.path.join(src, f))
|
|
||||||
|
|
||||||
o.hardlink_dir(linker, src, dst)
|
|
||||||
|
|
||||||
six.assertCountEqual(
|
|
||||||
self,
|
|
||||||
linker.queue_put.call_args_list,
|
|
||||||
[mock.call((os.path.join(src, f), os.path.join(dst, f))) for f in files],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestCheckFinishedProcesses(BaseTestCase):
|
|
||||||
def test_nothing_finished(self):
|
|
||||||
k1 = mock.Mock(returncode=None)
|
|
||||||
v1 = mock.Mock()
|
|
||||||
processes = {k1: v1}
|
|
||||||
|
|
||||||
six.assertCountEqual(self, o.check_finished_processes(processes), [])
|
|
||||||
|
|
||||||
def test_yields_finished(self):
|
|
||||||
k1 = mock.Mock(returncode=None)
|
|
||||||
v1 = mock.Mock()
|
|
||||||
k2 = mock.Mock(returncode=0)
|
|
||||||
v2 = mock.Mock()
|
|
||||||
processes = {k1: v1, k2: v2}
|
|
||||||
|
|
||||||
six.assertCountEqual(self, o.check_finished_processes(processes), [(k2, v2)])
|
|
||||||
|
|
||||||
def test_yields_failed(self):
|
|
||||||
k1 = mock.Mock(returncode=1)
|
|
||||||
v1 = mock.Mock()
|
|
||||||
processes = {k1: v1}
|
|
||||||
|
|
||||||
six.assertCountEqual(self, o.check_finished_processes(processes), [(k1, v1)])
|
|
||||||
|
|
||||||
|
|
||||||
class _Part(object):
|
|
||||||
def __init__(self, name, parent=None, fails=False, status=None):
|
|
||||||
self.name = name
|
|
||||||
self.finished = False
|
|
||||||
self.status = o.Status.WAITING if parent else o.Status.READY
|
|
||||||
if status:
|
|
||||||
self.status = status
|
|
||||||
self.proc = mock.Mock(name="proc_%s" % name, pid=hash(self))
|
|
||||||
self.parent = parent
|
|
||||||
self.fails = fails
|
|
||||||
self.failable = False
|
|
||||||
self.path = "/path/to/%s" % name
|
|
||||||
self.blocked_on = set([parent]) if parent else set()
|
|
||||||
|
|
||||||
def is_finished(self):
|
|
||||||
return self.finished or self.status == "FINISHED"
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<_Part(%r, parent=%r)>" % (self.name, self.parent)
|
|
||||||
|
|
||||||
|
|
||||||
def with_mocks(parts, finish_order, wait_results):
|
|
||||||
"""Setup all mocks and create dict with the parts.
|
|
||||||
:param finish_order: nested list: first element contains parts that finish
|
|
||||||
in first iteration, etc.
|
|
||||||
:param wait_results: list of names of processes that are returned by wait in each
|
|
||||||
iteration
|
|
||||||
"""
|
|
||||||
|
|
||||||
def decorator(func):
|
|
||||||
@wraps(func)
|
|
||||||
def worker(self, lp, update_status, cfp, hf, sp, wait):
|
|
||||||
self.parts = dict((p.name, p) for p in parts)
|
|
||||||
self.linker = lp.return_value.__enter__.return_value
|
|
||||||
|
|
||||||
update_status.side_effect = self.mock_update
|
|
||||||
hf.side_effect = self.mock_finish
|
|
||||||
sp.side_effect = self.mock_start
|
|
||||||
|
|
||||||
finish = [[]]
|
|
||||||
for grp in finish_order:
|
|
||||||
finish.append([(self.parts[p].proc, self.parts[p]) for p in grp])
|
|
||||||
|
|
||||||
cfp.side_effect = finish
|
|
||||||
wait.side_effect = [(self.parts[p].proc.pid, 0) for p in wait_results]
|
|
||||||
|
|
||||||
func(self)
|
|
||||||
|
|
||||||
self.assertEqual(lp.call_args_list, [mock.call("hardlink")])
|
|
||||||
|
|
||||||
return worker
|
|
||||||
|
|
||||||
return decorator
|
|
||||||
|
|
||||||
|
|
||||||
@mock.patch("os.wait")
|
|
||||||
@mock.patch("pungi_utils.orchestrator.start_part")
|
|
||||||
@mock.patch("pungi_utils.orchestrator.handle_finished")
|
|
||||||
@mock.patch("pungi_utils.orchestrator.check_finished_processes")
|
|
||||||
@mock.patch("pungi_utils.orchestrator.update_status")
|
|
||||||
@mock.patch("pungi_utils.orchestrator.linker_pool")
|
|
||||||
class TestRunAll(BaseTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.maxDiff = None
|
|
||||||
self.conf = mock.Mock(name="global_config")
|
|
||||||
self.calls = []
|
|
||||||
|
|
||||||
def mock_update(self, global_config, parts):
|
|
||||||
self.assertEqual(global_config, self.conf)
|
|
||||||
self.assertEqual(parts, self.parts)
|
|
||||||
self.calls.append("update_status")
|
|
||||||
|
|
||||||
def mock_start(self, global_config, parts, part):
|
|
||||||
self.assertEqual(global_config, self.conf)
|
|
||||||
self.assertEqual(parts, self.parts)
|
|
||||||
self.calls.append(("start_part", part.name))
|
|
||||||
part.status = o.Status.STARTED
|
|
||||||
return part.proc
|
|
||||||
|
|
||||||
@property
|
|
||||||
def sorted_calls(self):
|
|
||||||
"""Sort the consecutive calls of the same function based on the argument."""
|
|
||||||
|
|
||||||
def key(val):
|
|
||||||
return val[0] if isinstance(val, tuple) else val
|
|
||||||
|
|
||||||
return list(
|
|
||||||
itertools.chain.from_iterable(
|
|
||||||
sorted(grp, key=operator.itemgetter(1))
|
|
||||||
for _, grp in itertools.groupby(self.calls, key)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
def mock_finish(self, global_config, linker, parts, proc, part):
|
|
||||||
self.assertEqual(global_config, self.conf)
|
|
||||||
self.assertEqual(linker, self.linker)
|
|
||||||
self.assertEqual(parts, self.parts)
|
|
||||||
self.calls.append(("handle_finished", part.name))
|
|
||||||
for child in parts.values():
|
|
||||||
if child.parent == part.name:
|
|
||||||
child.status = o.Status.BLOCKED if part.fails else o.Status.READY
|
|
||||||
part.status = "DOOMED" if part.fails else "FINISHED"
|
|
||||||
|
|
||||||
@with_mocks(
|
|
||||||
[_Part("fst"), _Part("snd", parent="fst")], [["fst"], ["snd"]], ["fst", "snd"]
|
|
||||||
)
|
|
||||||
def test_sequential(self):
|
|
||||||
o.run_all(self.conf, self.parts)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
self.sorted_calls,
|
|
||||||
[
|
|
||||||
# First iteration starts fst
|
|
||||||
"update_status",
|
|
||||||
("start_part", "fst"),
|
|
||||||
# Second iteration handles finish of fst and starts snd
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "fst"),
|
|
||||||
("start_part", "snd"),
|
|
||||||
# Third iteration handles finish of snd
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "snd"),
|
|
||||||
# Final update of status
|
|
||||||
"update_status",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
@with_mocks([_Part("fst"), _Part("snd")], [["fst", "snd"]], ["fst"])
|
|
||||||
def test_parallel(self):
|
|
||||||
o.run_all(self.conf, self.parts)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
self.sorted_calls,
|
|
||||||
[
|
|
||||||
# First iteration starts both fst and snd
|
|
||||||
"update_status",
|
|
||||||
("start_part", "fst"),
|
|
||||||
("start_part", "snd"),
|
|
||||||
# Second iteration handles finish of both of them
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "fst"),
|
|
||||||
("handle_finished", "snd"),
|
|
||||||
# Final update of status
|
|
||||||
"update_status",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
@with_mocks(
|
|
||||||
[_Part("1"), _Part("2", parent="1"), _Part("3", parent="1")],
|
|
||||||
[["1"], ["2", "3"]],
|
|
||||||
["1", "2"],
|
|
||||||
)
|
|
||||||
def test_waits_for_dep_then_parallel_with_simultaneous_end(self):
|
|
||||||
o.run_all(self.conf, self.parts)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
self.sorted_calls,
|
|
||||||
[
|
|
||||||
# First iteration starts first part
|
|
||||||
"update_status",
|
|
||||||
("start_part", "1"),
|
|
||||||
# Second iteration starts 2 and 3
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "1"),
|
|
||||||
("start_part", "2"),
|
|
||||||
("start_part", "3"),
|
|
||||||
# Both 2 and 3 end in third iteration
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "2"),
|
|
||||||
("handle_finished", "3"),
|
|
||||||
# Final update of status
|
|
||||||
"update_status",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
@with_mocks(
|
|
||||||
[_Part("1"), _Part("2", parent="1"), _Part("3", parent="1")],
|
|
||||||
[["1"], ["3"], ["2"]],
|
|
||||||
["1", "3", "2"],
|
|
||||||
)
|
|
||||||
def test_waits_for_dep_then_parallel_with_different_end_times(self):
|
|
||||||
o.run_all(self.conf, self.parts)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
self.sorted_calls,
|
|
||||||
[
|
|
||||||
# First iteration starts first part
|
|
||||||
"update_status",
|
|
||||||
("start_part", "1"),
|
|
||||||
# Second iteration starts 2 and 3
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "1"),
|
|
||||||
("start_part", "2"),
|
|
||||||
("start_part", "3"),
|
|
||||||
# Third iteration sees 3 finish
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "3"),
|
|
||||||
# Fourth iteration, 2 finishes
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "2"),
|
|
||||||
# Final update of status
|
|
||||||
"update_status",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
@with_mocks(
|
|
||||||
[_Part("fst", fails=True), _Part("snd", parent="fst")], [["fst"]], ["fst"]
|
|
||||||
)
|
|
||||||
def test_blocked(self):
|
|
||||||
o.run_all(self.conf, self.parts)
|
|
||||||
|
|
||||||
self.assertEqual(
|
|
||||||
self.sorted_calls,
|
|
||||||
[
|
|
||||||
# First iteration starts first part
|
|
||||||
"update_status",
|
|
||||||
("start_part", "fst"),
|
|
||||||
# Second iteration handles fail of first part
|
|
||||||
"update_status",
|
|
||||||
("handle_finished", "fst"),
|
|
||||||
# Final update of status
|
|
||||||
"update_status",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@mock.patch("pungi_utils.orchestrator.get_compose_dir")
|
|
||||||
class TestGetTargetDir(BaseTestCase):
|
|
||||||
def test_with_absolute_path(self, gcd):
|
|
||||||
config = {"target": "/tgt", "compose_type": "nightly"}
|
|
||||||
cfg = mock.Mock()
|
|
||||||
cfg.get.side_effect = lambda _, k: config[k]
|
|
||||||
ci = mock.Mock()
|
|
||||||
res = o.get_target_dir(cfg, ci, None, reldir="/checkout")
|
|
||||||
self.assertEqual(res, gcd.return_value)
|
|
||||||
self.assertEqual(
|
|
||||||
gcd.call_args_list,
|
|
||||||
[mock.call("/tgt", ci, compose_type="nightly", compose_label=None)],
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_with_relative_path(self, gcd):
|
|
||||||
config = {"target": "tgt", "compose_type": "nightly"}
|
|
||||||
cfg = mock.Mock()
|
|
||||||
cfg.get.side_effect = lambda _, k: config[k]
|
|
||||||
ci = mock.Mock()
|
|
||||||
res = o.get_target_dir(cfg, ci, None, reldir="/checkout")
|
|
||||||
self.assertEqual(res, gcd.return_value)
|
|
||||||
self.assertEqual(
|
|
||||||
gcd.call_args_list,
|
|
||||||
[
|
|
||||||
mock.call(
|
|
||||||
"/checkout/tgt", ci, compose_type="nightly", compose_label=None
|
|
||||||
)
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestComputeStatus(BaseTestCase):
|
|
||||||
@parameterized.expand(
|
|
||||||
[
|
|
||||||
([("FINISHED", False)], "FINISHED"),
|
|
||||||
([("FINISHED", False), ("STARTED", False)], "STARTED"),
|
|
||||||
([("FINISHED", False), ("STARTED", False), ("WAITING", False)], "STARTED"),
|
|
||||||
([("FINISHED", False), ("DOOMED", False)], "DOOMED"),
|
|
||||||
(
|
|
||||||
[("FINISHED", False), ("BLOCKED", True), ("DOOMED", True)],
|
|
||||||
"FINISHED_INCOMPLETE",
|
|
||||||
),
|
|
||||||
([("FINISHED", False), ("BLOCKED", False), ("DOOMED", True)], "DOOMED"),
|
|
||||||
([("FINISHED", False), ("DOOMED", True)], "FINISHED_INCOMPLETE"),
|
|
||||||
([("FINISHED", False), ("STARTED", False), ("DOOMED", False)], "STARTED"),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
def test_cases(self, statuses, expected):
|
|
||||||
self.assertEqual(o.compute_status(statuses), expected)
|
|
||||||
|
|
||||||
|
|
||||||
class TestUpdateStatus(PungiTestCase):
|
|
||||||
def test_updating(self):
|
|
||||||
os.makedirs(os.path.join(self.topdir, "compose/metadata"))
|
|
||||||
conf = o.Config(
|
|
||||||
self.topdir, "production", "RC-1.0", "/old", "/cfg", 1234, ["--quiet"]
|
|
||||||
)
|
|
||||||
o.update_status(
|
|
||||||
conf,
|
|
||||||
{"1": _Part("1", status="FINISHED"), "2": _Part("2", status="STARTED")},
|
|
||||||
)
|
|
||||||
self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")
|
|
||||||
self.assertFileContent(
|
|
||||||
os.path.join(self.topdir, "compose/metadata/parts.json"),
|
|
||||||
dedent(
|
|
||||||
"""\
|
|
||||||
{
|
|
||||||
"1": {
|
|
||||||
"path": "/path/to/1",
|
|
||||||
"status": "FINISHED"
|
|
||||||
},
|
|
||||||
"2": {
|
|
||||||
"path": "/path/to/2",
|
|
||||||
"status": "STARTED"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@mock.patch("pungi_utils.orchestrator.get_target_dir")
|
|
||||||
class TestPrepareComposeDir(PungiTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
super(TestPrepareComposeDir, self).setUp()
|
|
||||||
self.conf = mock.Mock(name="config")
|
|
||||||
self.main_config = "/some/config"
|
|
||||||
self.compose_info = mock.Mock(name="compose_info")
|
|
||||||
|
|
||||||
def test_new_compose(self, gtd):
|
|
||||||
def mock_get_target(conf, compose_info, label, reldir):
|
|
||||||
self.assertEqual(conf, self.conf)
|
|
||||||
self.assertEqual(compose_info, self.compose_info)
|
|
||||||
self.assertEqual(label, args.label)
|
|
||||||
self.assertEqual(reldir, "/some")
|
|
||||||
touch(os.path.join(self.topdir, "work/global/composeinfo-base.json"), "WOO")
|
|
||||||
return self.topdir
|
|
||||||
|
|
||||||
gtd.side_effect = mock_get_target
|
|
||||||
args = mock.Mock(name="args", spec=["label"])
|
|
||||||
retval = o.prepare_compose_dir(
|
|
||||||
self.conf, args, self.main_config, self.compose_info
|
|
||||||
)
|
|
||||||
self.assertEqual(retval, self.topdir)
|
|
||||||
self.assertFileContent(
|
|
||||||
os.path.join(self.topdir, "compose/metadata/composeinfo.json"), "WOO"
|
|
||||||
)
|
|
||||||
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "logs")))
|
|
||||||
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "parts")))
|
|
||||||
self.assertTrue(os.path.isdir(os.path.join(self.topdir, "work/global")))
|
|
||||||
self.assertFileContent(os.path.join(self.topdir, "STATUS"), "STARTED")
|
|
||||||
|
|
||||||
def test_restarting_compose(self, gtd):
|
|
||||||
args = mock.Mock(name="args", spec=["label", "compose_path"])
|
|
||||||
retval = o.prepare_compose_dir(
|
|
||||||
self.conf, args, self.main_config, self.compose_info
|
|
||||||
)
|
|
||||||
self.assertEqual(gtd.call_args_list, [])
|
|
||||||
self.assertEqual(retval, args.compose_path)
|
|
||||||
|
|
||||||
|
|
||||||
class TestLoadPartsMetadata(PungiTestCase):
|
|
||||||
def test_loading(self):
|
|
||||||
touch(
|
|
||||||
os.path.join(self.topdir, "compose/metadata/parts.json"), '{"foo": "bar"}'
|
|
||||||
)
|
|
||||||
conf = mock.Mock(target=self.topdir)
|
|
||||||
|
|
||||||
self.assertEqual(o.load_parts_metadata(conf), {"foo": "bar"})
|
|
||||||
|
|
||||||
|
|
||||||
@mock.patch("pungi_utils.orchestrator.load_parts_metadata")
|
|
||||||
class TestSetupForRestart(BaseTestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.conf = mock.Mock(name="global_config")
|
|
||||||
|
|
||||||
def test_restart_ok(self, lpm):
|
|
||||||
lpm.return_value = {
|
|
||||||
"p1": {"status": "FINISHED", "path": "/p1"},
|
|
||||||
"p2": {"status": "DOOMED", "path": "/p2"},
|
|
||||||
}
|
|
||||||
parts = {"p1": _Part("p1"), "p2": _Part("p2", parent="p1")}
|
|
||||||
|
|
||||||
o.setup_for_restart(self.conf, parts, ["p2"])
|
|
||||||
|
|
||||||
self.assertEqual(parts["p1"].status, "FINISHED")
|
|
||||||
self.assertEqual(parts["p1"].path, "/p1")
|
|
||||||
self.assertEqual(parts["p2"].status, "READY")
|
|
||||||
self.assertEqual(parts["p2"].path, None)
|
|
||||||
|
|
||||||
def test_restart_one_blocked_one_ok(self, lpm):
|
|
||||||
lpm.return_value = {
|
|
||||||
"p1": {"status": "DOOMED", "path": "/p1"},
|
|
||||||
"p2": {"status": "DOOMED", "path": "/p2"},
|
|
||||||
"p3": {"status": "WAITING", "path": None},
|
|
||||||
}
|
|
||||||
parts = {
|
|
||||||
"p1": _Part("p1"),
|
|
||||||
"p2": _Part("p2", parent="p1"),
|
|
||||||
"p3": _Part("p3", parent="p2"),
|
|
||||||
}
|
|
||||||
|
|
||||||
o.setup_for_restart(self.conf, parts, ["p1", "p3"])
|
|
||||||
|
|
||||||
        self.assertEqual(parts["p1"].status, "READY")
        self.assertEqual(parts["p1"].path, None)
        self.assertEqual(parts["p2"].status, "DOOMED")
        self.assertEqual(parts["p2"].path, "/p2")
        self.assertEqual(parts["p3"].status, "WAITING")
        self.assertEqual(parts["p3"].path, None)

    def test_restart_all_blocked(self, lpm):
        lpm.return_value = {
            "p1": {"status": "DOOMED", "path": "/p1"},
            "p2": {"status": "STARTED", "path": "/p2"},
        }
        parts = {"p1": _Part("p1"), "p2": _Part("p2", parent="p1")}

        with self.assertRaises(RuntimeError):
            o.setup_for_restart(self.conf, parts, ["p2"])

        self.assertEqual(parts["p1"].status, "DOOMED")
        self.assertEqual(parts["p1"].path, "/p1")
        self.assertEqual(parts["p2"].status, "WAITING")
        self.assertEqual(parts["p2"].path, None)


@mock.patch("atexit.register")
@mock.patch("kobo.shortcuts.run")
class TestRunKinit(BaseTestCase):
    def test_without_config(self, run, register):
        conf = mock.Mock()
        conf.getboolean.return_value = False

        o.run_kinit(conf)

        self.assertEqual(run.call_args_list, [])
        self.assertEqual(register.call_args_list, [])

    @mock.patch.dict("os.environ")
    def test_with_config(self, run, register):
        conf = mock.Mock()
        conf.getboolean.return_value = True
        conf.get.side_effect = lambda section, option: option

        o.run_kinit(conf)

        self.assertEqual(
            run.call_args_list,
            [mock.call(["kinit", "-k", "-t", "kerberos_keytab", "kerberos_principal"])],
        )
        self.assertEqual(
            register.call_args_list, [mock.call(os.remove, os.environ["KRB5CCNAME"])]
        )


@mock.patch.dict("os.environ", {}, clear=True)
class TestGetScriptEnv(BaseTestCase):
    def test_without_metadata(self):
        env = o.get_script_env("/foobar")
        self.assertEqual(env, {"COMPOSE_PATH": "/foobar"})

    def test_with_metadata(self):
        compose_dir = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
        env = o.get_script_env(compose_dir)
        self.maxDiff = None
        self.assertEqual(
            env,
            {
                "COMPOSE_PATH": compose_dir,
                "COMPOSE_ID": "DP-1.0-20161013.t.4",
                "COMPOSE_DATE": "20161013",
                "COMPOSE_TYPE": "test",
                "COMPOSE_RESPIN": "4",
                "COMPOSE_LABEL": "",
                "RELEASE_ID": "DP-1.0",
                "RELEASE_NAME": "Dummy Product",
                "RELEASE_SHORT": "DP",
                "RELEASE_VERSION": "1.0",
                "RELEASE_TYPE": "ga",
                "RELEASE_IS_LAYERED": "",
            },
        )


class TestRunScripts(BaseTestCase):
    @mock.patch("pungi_utils.orchestrator.get_script_env")
    @mock.patch("kobo.shortcuts.run")
    def test_run_scripts(self, run, get_env):
        commands = """
            date
            env
        """

        o.run_scripts("pref_", "/tmp/compose", commands)

        self.assertEqual(
            run.call_args_list,
            [
                mock.call(
                    "date",
                    logfile="/tmp/compose/logs/pref_0.log",
                    env=get_env.return_value,
                ),
                mock.call(
                    "env",
                    logfile="/tmp/compose/logs/pref_1.log",
                    env=get_env.return_value,
                ),
            ],
        )


@mock.patch("pungi.notifier.PungiNotifier")
class TestSendNotification(BaseTestCase):
    def test_no_command(self, notif):
        o.send_notification("/foobar", None, None)
        self.assertEqual(notif.mock_calls, [])

    @mock.patch("pungi.util.load_config")
    def test_with_command_and_translate(self, load_config, notif):
        compose_dir = os.path.join(FIXTURE_DIR, "DP-1.0-20161013.t.4")
        load_config.return_value = {
            "translate_paths": [(os.path.dirname(compose_dir), "http://example.com")],
        }
        parts = {"foo": mock.Mock()}

        o.send_notification(compose_dir, "handler", parts)

        self.assertEqual(len(notif.mock_calls), 2)
        self.assertEqual(notif.mock_calls[0], mock.call(["handler"]))
        _, args, kwargs = notif.mock_calls[1]
        self.assertEqual(args, ("status-change",))
        self.assertEqual(
            kwargs,
            {
                "status": "FINISHED",
                "workdir": compose_dir,
                "location": "http://example.com/DP-1.0-20161013.t.4",
                "compose_id": "DP-1.0-20161013.t.4",
                "compose_date": "20161013",
                "compose_type": "test",
                "compose_respin": "4",
                "compose_label": None,
                "release_id": "DP-1.0",
                "release_name": "Dummy Product",
                "release_short": "DP",
                "release_version": "1.0",
                "release_type": "ga",
                "release_is_layered": False,
            },
        )
        self.assertEqual(load_config.call_args_list, [mock.call(parts["foo"].config)])
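The assertions above pin down the expected behaviour of o.run_scripts fairly precisely: each non-empty command line runs with the compose environment and its own numbered log file. A minimal sketch consistent with these assertions (illustrative only; the real pungi_utils.orchestrator code may differ):

    import os
    import kobo.shortcuts

    def run_scripts(prefix, compose_path, commands):
        # Environment comes from get_script_env(), the helper exercised
        # by TestGetScriptEnv above.
        env = get_script_env(compose_path)
        cmds = [c.strip() for c in commands.splitlines() if c.strip()]
        for idx, command in enumerate(cmds):
            # Each command logs to e.g. <compose>/logs/pref_0.log, pref_1.log, ...
            logfile = os.path.join(compose_path, "logs", "%s%d.log" % (prefix, idx))
            kobo.shortcuts.run(command, logfile=logfile, env=env)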
@@ -171,6 +171,7 @@ class OSBSThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "translate_paths": [(self.topdir, "http://root")],
             },
         )
@@ -3,14 +3,76 @@
 import mock

 import os
+import shutil
+import tempfile
+import unittest

 import koji as orig_koji

 from tests import helpers
+from pungi import compose
 from pungi.phases import osbuild
 from pungi.checks import validate


+class OSBuildPhaseHelperFuncsTest(unittest.TestCase):
+    @mock.patch("pungi.compose.ComposeInfo")
+    def setUp(self, ci):
+        self.tmp_dir = tempfile.mkdtemp()
+        conf = {"translate_paths": [(self.tmp_dir, "http://example.com")]}
+        ci.return_value.compose.respin = 0
+        ci.return_value.compose.id = "RHEL-8.0-20180101.n.0"
+        ci.return_value.compose.date = "20160101"
+        ci.return_value.compose.type = "nightly"
+        ci.return_value.compose.type_suffix = ".n"
+        ci.return_value.compose.label = "RC-1.0"
+        ci.return_value.compose.label_major_version = "1"
+
+        compose_dir = os.path.join(self.tmp_dir, ci.return_value.compose.id)
+        self.compose = compose.Compose(conf, compose_dir)
+        server_variant = mock.Mock(uid="Server", type="variant")
+        client_variant = mock.Mock(uid="Client", type="variant")
+        self.compose.all_variants = {
+            "Server": server_variant,
+            "Client": client_variant,
+        }
+
+    def tearDown(self):
+        shutil.rmtree(self.tmp_dir)
+
+    def test__get_repo_urls(self):
+        repos = [
+            "http://example.com/repo",
+            "Server",
+            {
+                "baseurl": "Client",
+                "package_sets": ["build"],
+            },
+            {
+                "baseurl": "ftp://example.com/linux/repo",
+                "package_sets": ["build"],
+            },
+        ]
+
+        expect = [
+            "http://example.com/repo",
+            "http://example.com/RHEL-8.0-20180101.n.0/compose/Server/$basearch/os",
+            {
+                "baseurl": "http://example.com/RHEL-8.0-20180101.n.0/compose/Client/"
+                + "$basearch/os",
+                "package_sets": ["build"],
+            },
+            {
+                "baseurl": "ftp://example.com/linux/repo",
+                "package_sets": ["build"],
+            },
+        ]
+
+        self.assertEqual(
+            osbuild.OSBuildPhase._get_repo_urls(self.compose, repos), expect
+        )
+
+
 class OSBuildPhaseTest(helpers.PungiTestCase):
     @mock.patch("pungi.phases.osbuild.ThreadPool")
     def test_run(self, ThreadPool):
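The new test__get_repo_urls above documents the three repo forms the osbuild phase accepts: a plain URL (used as-is), a variant UID (expanded to that variant's repo under the compose path), and a dict carrying "baseurl" plus extra keys such as "package_sets". A hypothetical phase configuration combining all three:

    cfg = {
        "name": "test-image",
        "distro": "rhel-8",
        "target": "image-target",
        "arches": ["x86_64"],
        "image_types": ["qcow2"],
        "repo": [
            "http://example.com/repo",  # plain URL, passed through untouched
            "Server",  # variant UID, expanded to .../compose/Server/$basearch/os
            {"baseurl": "Client", "package_sets": ["build"]},  # rich repo dict
        ],
    }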
@@ -124,6 +186,49 @@ class OSBuildPhaseTest(helpers.PungiTestCase):
         )
         self.assertNotEqual(validate(compose.conf), ([], []))

+    @mock.patch("pungi.phases.osbuild.ThreadPool")
+    def test_rich_repos(self, ThreadPool):
+        repo = {"baseurl": "http://example.com/repo", "package_sets": ["build"]}
+        cfg = {
+            "name": "test-image",
+            "distro": "rhel-8",
+            "version": "1",
+            "target": "image-target",
+            "arches": ["x86_64"],
+            "image_types": ["qcow2"],
+            "repo": [repo],
+        }
+        compose = helpers.DummyCompose(
+            self.topdir, {"osbuild": {"^Everything$": [cfg]}}
+        )
+
+        self.assertValidConfig(compose.conf)
+
+        pool = ThreadPool.return_value
+
+        phase = osbuild.OSBuildPhase(compose)
+        phase.run()
+
+        self.assertEqual(len(pool.add.call_args_list), 1)
+        self.assertEqual(
+            pool.queue_put.call_args_list,
+            [
+                mock.call(
+                    (
+                        compose,
+                        compose.variants["Everything"],
+                        cfg,
+                        ["x86_64"],
+                        "1",
+                        None,
+                        "image-target",
+                        [repo, self.topdir + "/compose/Everything/$arch/os"],
+                        [],
+                    ),
+                ),
+            ],
+        )
+
+
 class RunOSBuildThreadTest(helpers.PungiTestCase):
     def setUp(self):
@@ -134,6 +239,7 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "translate_paths": [(self.topdir, "http://root")],
             },
         )
@@ -189,7 +295,13 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
                 "1",  # version
                 "15",  # release
                 "image-target",
-                [self.topdir + "/compose/Everything/$arch/os"],
+                [
+                    self.topdir + "/compose/Everything/$arch/os",
+                    {
+                        "baseurl": self.topdir + "/compose/Everything/$arch/os",
+                        "package_sets": ["build"],
+                    },
+                ],
                 ["x86_64"],
             ),
             1,
@@ -211,7 +323,13 @@ class RunOSBuildThreadTest(helpers.PungiTestCase):
                 ["aarch64", "x86_64"],
                 opts={
                     "release": "15",
-                    "repo": [self.topdir + "/compose/Everything/$arch/os"],
+                    "repo": [
+                        self.topdir + "/compose/Everything/$arch/os",
+                        {
+                            "baseurl": self.topdir + "/compose/Everything/$arch/os",
+                            "package_sets": ["build"],
+                        },
+                    ],
                 },
             ),
             mock.call.save_task_id(1234),
@@ -103,6 +103,7 @@ class OstreeThreadTest(helpers.PungiTestCase):
                 "release_name": "Fedora",
                 "release_version": "Rawhide",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_tag": "rrt",
                 "image_volid_formats": ["{release_short}-{variant}-{arch}"],
                 "translate_paths": [(self.topdir + "/work", "http://example.com/work")],
@@ -123,6 +123,7 @@ class OSTreeThreadTest(helpers.PungiTestCase):
             self.topdir,
             {
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_tag": "rrt",
                 "translate_paths": [(self.topdir, "http://example.com")],
             },
@@ -315,7 +315,6 @@ class OstreeTreeScriptTest(helpers.PungiTestCase):

     @mock.patch("kobo.shortcuts.run")
     def test_extra_config_with_keep_original_sources(self, run):
-
         configdir = os.path.join(self.topdir, "config")
         self._make_dummy_config_dir(configdir)
         treefile = os.path.join(configdir, "fedora-atomic-docker-host.json")
@@ -47,7 +47,7 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
             pkgset.name = name
             pkgset.reuse = None

-            def mock_subset(primary, arch_list, exclusive_noarch):
+            def mock_subset(primary, arch_list, **kwargs):
                 self.subsets[primary] = mock.Mock()
                 return self.subsets[primary]

@@ -73,10 +73,16 @@ class TestMaterializedPkgsetCreate(helpers.PungiTestCase):
         self.assertEqual(result["amd64"], self.subsets["amd64"])

         self.pkgset.subset.assert_any_call(
-            "x86_64", ["x86_64", "noarch", "src"], exclusive_noarch=True
+            "x86_64",
+            ["x86_64", "noarch", "src"],
+            exclusive_noarch=True,
+            inherit_to_noarch=True,
         )
         self.pkgset.subset.assert_any_call(
-            "amd64", ["amd64", "x86_64", "noarch", "src"], exclusive_noarch=True
+            "amd64",
+            ["amd64", "x86_64", "noarch", "src"],
+            exclusive_noarch=True,
+            inherit_to_noarch=True,
         )

         for arch, pkgset in result.package_sets.items():
@@ -133,6 +133,14 @@ class PkgsetCompareMixin(object):
         self.assertEqual({}, actual)


+class DummySystem(object):
+    def __init__(self):
+        self.methods = ["_listapi", "Dummy", "getRPM", "getRPMChecksums"]
+
+    def listMethods(self):
+        return self.methods
+
+
 @mock.patch("pungi.phases.pkgset.pkgsets.ReaderPool", new=FakePool)
 @mock.patch("kobo.pkgset.FileCache", new=MockFileCache)
 class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
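The DummySystem stub apparently stands in for the Koji hub's XML-RPC introspection object, so the package-set code can ask which API methods the hub provides without a live connection. A sketch of the kind of capability probe this enables (hub_supports is a hypothetical name, not the actual pungi internal):

    def hub_supports(koji_proxy, method):
        # Koji exposes introspection under the "system" namespace;
        # DummySystem above fakes system.listMethods() for these tests.
        return method in koji_proxy.system.listMethods()

    # e.g. prefer hub-side checksums when the method exists:
    if hub_supports(koji_wrapper.koji_proxy, "getRPMChecksums"):
        pass  # fetch checksums from the hub instead of hashing local files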
@@ -142,9 +150,10 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             self.tagged_rpms = json.load(f)

         self.path_info = MockPathInfo(self.topdir)
+        self.koji_downloader = helpers.FSKojiDownloader()
         self.koji_wrapper = mock.Mock()
         self.koji_wrapper.koji_proxy.listTaggedRPMS.return_value = self.tagged_rpms
+        self.koji_wrapper.koji_proxy.system = DummySystem()
         self.koji_wrapper.koji_module.pathinfo = self.path_info

     def _touch_files(self, filenames):
@@ -171,7 +180,9 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             ]
         )

-        pkgset = pkgsets.KojiPackageSet("pkgset", self.koji_wrapper, [None])
+        pkgset = pkgsets.KojiPackageSet(
+            "pkgset", self.koji_wrapper, [None], downloader=self.koji_downloader
+        )

         result = pkgset.populate("f25")
@@ -205,7 +216,11 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         )

         pkgset = pkgsets.KojiPackageSet(
-            "pkgset", self.koji_wrapper, [None], arches=["x86_64"]
+            "pkgset",
+            self.koji_wrapper,
+            [None],
+            arches=["x86_64"],
+            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -235,7 +250,11 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         )

         pkgset = pkgsets.KojiPackageSet(
-            "pkgset", self.koji_wrapper, ["cafebabe", "deadbeef"], arches=["x86_64"]
+            "pkgset",
+            self.koji_wrapper,
+            ["cafebabe", "deadbeef"],
+            arches=["x86_64"],
+            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -264,7 +283,11 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
         )

         pkgset = pkgsets.KojiPackageSet(
-            "pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
+            "pkgset",
+            self.koji_wrapper,
+            ["cafebabe", None],
+            arches=["x86_64"],
+            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -286,7 +309,11 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):

     def test_can_not_find_signed_package(self):
         pkgset = pkgsets.KojiPackageSet(
-            "pkgset", self.koji_wrapper, ["cafebabe"], arches=["x86_64"]
+            "pkgset",
+            self.koji_wrapper,
+            ["cafebabe"],
+            arches=["x86_64"],
+            downloader=self.koji_downloader,
         )

         with self.assertRaises(RuntimeError) as ctx:
@@ -325,6 +352,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             arches=["x86_64"],
             signed_packages_retries=2,
             signed_packages_wait=5,
+            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -362,6 +390,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             ["cafebabe"],
             arches=["x86_64"],
             allow_invalid_sigkeys=True,
+            downloader=self.koji_downloader,
         )

         pkgset.populate("f25")
@@ -382,7 +411,11 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):

     def test_can_not_find_any_package(self):
         pkgset = pkgsets.KojiPackageSet(
-            "pkgset", self.koji_wrapper, ["cafebabe", None], arches=["x86_64"]
+            "pkgset",
+            self.koji_wrapper,
+            ["cafebabe", None],
+            arches=["x86_64"],
+            downloader=self.koji_downloader,
         )

         with self.assertRaises(RuntimeError) as ctx:
@@ -407,6 +440,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             arches=["x86_64"],
             signed_packages_retries=2,
             signed_packages_wait=5,
+            downloader=self.koji_downloader,
         )

         with self.assertRaises(RuntimeError) as ctx:
@@ -443,6 +477,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             [None],
             packages=["bash"],
             populate_only_packages=True,
+            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -543,6 +578,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             [None],
             arches=["x86_64"],
             cache_region=cache_region,
+            downloader=self.koji_downloader,
         )

         # Try calling the populate twice, but expect just single listTaggedRPMs
@@ -582,6 +618,7 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             [None],
             arches=["x86_64"],
             cache_region=cache_region,
+            downloader=self.koji_downloader,
         )

         # Try calling the populate twice with different event id. It must not
@@ -635,7 +672,11 @@ class TestKojiPkgset(PkgsetCompareMixin, helpers.PungiTestCase):
             ]

         pkgset = pkgsets.KojiPackageSet(
-            "pkgset", self.koji_wrapper, [None], extra_builds=["pungi-4.1.3-3.fc25"]
+            "pkgset",
+            self.koji_wrapper,
+            [None],
+            extra_builds=["pungi-4.1.3-3.fc25"],
+            downloader=self.koji_downloader,
         )

         result = pkgset.populate("f25")
@@ -853,6 +894,8 @@ class TestReuseKojiPkgset(helpers.PungiTestCase):
                 "include_packages": None,
                 "rpms_by_arch": mock.Mock(),
                 "srpms_by_name": mock.Mock(),
+                "exclusive_noarch": True,
+                "inherit_to_noarch": True,
             }
         )
         self.pkgset.old_file_cache = mock.Mock()
@@ -934,6 +977,28 @@ class TestMergePackageSets(PkgsetCompareMixin, unittest.TestCase):
             first.rpms_by_arch, {"i686": ["rpms/bash@4.3.42@4.fc24@i686"], "noarch": []}
         )

+    def test_merge_doesnt_exclude_noarch_exclude_arch_when_configured(self):
+        first = pkgsets.PackageSetBase("first", [None])
+        second = pkgsets.PackageSetBase("second", [None])
+
+        pkg = first.file_cache.add("rpms/bash@4.3.42@4.fc24@i686")
+        first.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
+
+        pkg = second.file_cache.add("rpms/pungi@4.1.3@3.fc25@noarch")
+        pkg.excludearch = ["i686"]
+        second.rpms_by_arch.setdefault(pkg.arch, []).append(pkg)
+
+        first.merge(second, "i386", ["i686", "noarch"], inherit_to_noarch=False)
+
+        print(first.rpms_by_arch)
+        self.assertPkgsetEqual(
+            first.rpms_by_arch,
+            {
+                "i686": ["rpms/bash@4.3.42@4.fc24@i686"],
+                "noarch": ["rpms/pungi@4.1.3@3.fc25@noarch"],
+            },
+        )
+
     def test_merge_excludes_noarch_exclusive_arch(self):
         first = pkgsets.PackageSetBase("first", [None])
         second = pkgsets.PackageSetBase("second", [None])
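The new merge test fixes the semantics of inherit_to_noarch: when it is disabled, a noarch package is kept even if its ExcludeArch covers the arch being composed. A rough sketch of the rule the merge tests encode (illustrative only; the real PackageSetBase.merge is more involved):

    def keep_noarch_package(pkg, arch, inherit_to_noarch=True):
        if pkg.arch != "noarch" or not inherit_to_noarch:
            return True
        # Default behaviour: honour ExcludeArch on noarch packages.
        return arch not in getattr(pkg, "excludearch", [])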
@@ -31,6 +31,17 @@ TAG_INFO = {
 }


+def _mk_module_build(r, t):
+    """Create a dict as returned by Koji buildinfo."""
+    return {
+        "name": "foo",
+        "version": "1",
+        "release": r,
+        "nvr": "foo-1-%s" % r,
+        "tag_name": t,
+    }
+
+
 class TestGetKojiEvent(helpers.PungiTestCase):
     def setUp(self):
         super(TestGetKojiEvent, self).setUp()
@@ -73,7 +84,8 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
     def setUp(self):
         super(TestPopulateGlobalPkgset, self).setUp()
         self.compose = helpers.DummyCompose(
-            self.topdir, {"pkgset_koji_tag": "f25", "sigkeys": ["foo", "bar"]}
+            self.topdir,
+            {"pkgset_koji_tag": "f25", "sigkeys": ["foo", "bar"], "koji_cache": "/tmp"},
         )
         self.koji_wrapper = mock.Mock()
         self.pkgset_path = os.path.join(
@@ -92,13 +104,20 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         orig_pkgset = KojiPackageSet.return_value

         pkgsets = source_koji.populate_global_pkgset(
-            self.compose, self.koji_wrapper, "/prefix", 123456
+            self.compose, self.koji_wrapper, 123456
         )

         self.assertEqual(len(pkgsets), 1)
         self.assertIs(pkgsets[0], orig_pkgset)
         pkgsets[0].assert_has_calls(
-            [mock.call.populate("f25", 123456, inherit=True, include_packages=set())],
+            [
+                mock.call.populate(
+                    "f25",
+                    event=123456,
+                    inherit=True,
+                    include_packages=set(),
+                ),
+            ],
         )

     def mock_materialize(self, compose, pkgset, prefix, mmd):
@@ -111,7 +130,11 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
     def test_populate_with_multiple_koji_tags(self, KojiPackageSet, materialize):
         self.compose = helpers.DummyCompose(
             self.topdir,
-            {"pkgset_koji_tag": ["f25", "f25-extra"], "sigkeys": ["foo", "bar"]},
+            {
+                "pkgset_koji_tag": ["f25", "f25-extra"],
+                "sigkeys": ["foo", "bar"],
+                "koji_cache": "/tmp",
+            },
         )

         materialize.side_effect = self.mock_materialize
@@ -119,7 +142,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         KojiPackageSet.return_value.reuse = None

         pkgsets = source_koji.populate_global_pkgset(
-            self.compose, self.koji_wrapper, "/prefix", 123456
+            self.compose, self.koji_wrapper, 123456
         )

         self.assertEqual(len(pkgsets), 2)
@@ -135,12 +158,16 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         )

         pkgsets[0].assert_has_calls(
-            [mock.call.populate("f25", 123456, inherit=True, include_packages=set())]
+            [
+                mock.call.populate(
+                    "f25", event=123456, inherit=True, include_packages=set()
+                ),
+            ]
         )
         pkgsets[1].assert_has_calls(
             [
                 mock.call.populate(
-                    "f25-extra", 123456, inherit=True, include_packages=set()
+                    "f25-extra", event=123456, inherit=True, include_packages=set()
                 ),
             ]
         )
@@ -154,6 +181,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
             {
                 "gather_method": "nodeps",
                 "pkgset_koji_tag": "f25",
+                "koji_cache": "/tmp",
                 "sigkeys": ["foo", "bar"],
                 "additional_packages": [(".*", {"*": ["pkg", "foo.x86_64"]})],
             },
@@ -162,7 +190,7 @@ class TestPopulateGlobalPkgset(helpers.PungiTestCase):
         materialize.side_effect = self.mock_materialize

         pkgsets = source_koji.populate_global_pkgset(
-            self.compose, self.koji_wrapper, "/prefix", 123456
+            self.compose, self.koji_wrapper, 123456
         )
         self.assertEqual(len(pkgsets), 1)
         six.assertCountEqual(self, pkgsets[0].packages, ["pkg", "foo"])
@@ -171,7 +199,9 @@
 class TestGetPackageSetFromKoji(helpers.PungiTestCase):
     def setUp(self):
         super(TestGetPackageSetFromKoji, self).setUp()
-        self.compose = helpers.DummyCompose(self.topdir, {"pkgset_koji_tag": "f25"})
+        self.compose = helpers.DummyCompose(
+            self.topdir, {"pkgset_koji_tag": "f25", "koji_cache": "/tmp"}
+        )
         self.compose.koji_event = None
         self.koji_wrapper = mock.Mock()
         self.koji_wrapper.koji_proxy.getLastEvent.return_value = EVENT_INFO
@@ -179,9 +209,7 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):

     @mock.patch("pungi.phases.pkgset.sources.source_koji.populate_global_pkgset")
     def test_get_package_sets(self, pgp):
-        pkgsets = source_koji.get_pkgset_from_koji(
-            self.compose, self.koji_wrapper, "/prefix"
-        )
+        pkgsets = source_koji.get_pkgset_from_koji(self.compose, self.koji_wrapper)

         six.assertCountEqual(
             self, self.koji_wrapper.koji_proxy.mock_calls, [mock.call.getLastEvent()]
@@ -190,7 +218,7 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):

         self.assertEqual(
             pgp.call_args_list,
-            [mock.call(self.compose, self.koji_wrapper, "/prefix", EVENT_INFO)],
+            [mock.call(self.compose, self.koji_wrapper, EVENT_INFO)],
         )

     def test_get_koji_modules(self):
@@ -232,10 +260,10 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
             self.compose, self.koji_wrapper, event, module_info_str
         )

-        assert type(result) is list
+        assert isinstance(result, list)
         assert len(result) == 1
         module = result[0]
-        assert type(module) is dict
+        assert isinstance(module, dict)
         self.assertIn("module_stream", module)
         self.assertIn("module_version", module)
         self.assertIn("module_context", module)
@@ -361,11 +389,11 @@ class TestGetPackageSetFromKoji(helpers.PungiTestCase):
             self.compose, self.koji_wrapper, event, module_info_str
         )

-        assert type(result) is list
+        assert isinstance(result, list)
         assert len(result) == 2
         module = result[0]
         for module in result:
-            assert type(module) is dict
+            assert isinstance(module, dict)
             self.assertIn("module_stream", module)
             self.assertIn("module_version", module)
             self.assertIn("module_context", module)
@@ -440,14 +468,16 @@ class TestSourceKoji(helpers.PungiTestCase):
     @mock.patch("pungi.phases.pkgset.sources.source_koji.get_pkgset_from_koji")
     @mock.patch("pungi.wrappers.kojiwrapper.KojiWrapper")
     def test_run(self, KojiWrapper, gpfk):
-        compose = helpers.DummyCompose(self.topdir, {"koji_profile": "koji"})
+        compose = helpers.DummyCompose(
+            self.topdir, {"koji_profile": "koji", "koji_cache": "/prefix"}
+        )
         KojiWrapper.return_value.koji_module.config.topdir = "/prefix"

         phase = source_koji.PkgsetSourceKoji(compose)
         pkgsets, path_prefix = phase()

         self.assertEqual(pkgsets, gpfk.return_value)
-        self.assertEqual(path_prefix, "/prefix/")
+        self.assertEqual(path_prefix, "/prefix")
         self.assertEqual(KojiWrapper.mock_calls, [mock.call(compose)])
@@ -539,19 +569,15 @@ class TestFilterInherited(unittest.TestCase):
             {"name": "middle-tag"},
             {"name": "bottom-tag"},
         ]
-        module_builds = [
-            {"name": "foo", "version": "1", "release": "1", "tag_name": "top-tag"},
-            {"name": "foo", "version": "1", "release": "2", "tag_name": "bottom-tag"},
-            {"name": "foo", "version": "1", "release": "3", "tag_name": "middle-tag"},
-        ]
+        m1 = _mk_module_build("1", "top-tag")
+        m2 = _mk_module_build("2", "middle-tag")
+        m3 = _mk_module_build("3", "bottom-tag")
+        module_builds = [m1, m2, m3]

         result = source_koji.filter_inherited(koji_proxy, event, module_builds, top_tag)

-        six.assertCountEqual(
-            self,
-            result,
-            [{"name": "foo", "version": "1", "release": "1", "tag_name": "top-tag"}],
-        )
+        six.assertCountEqual(self, result, [m1])
         self.assertEqual(
             koji_proxy.mock_calls,
             [mock.call.getFullInheritance("top-tag", event=123456)],
@@ -566,18 +592,33 @@ class TestFilterInherited(unittest.TestCase):
             {"name": "middle-tag"},
             {"name": "bottom-tag"},
         ]
-        module_builds = [
-            {"name": "foo", "version": "1", "release": "2", "tag_name": "bottom-tag"},
-            {"name": "foo", "version": "1", "release": "3", "tag_name": "middle-tag"},
-        ]
+        m2 = _mk_module_build("2", "bottom-tag")
+        m3 = _mk_module_build("3", "middle-tag")
+        module_builds = [m2, m3]

         result = source_koji.filter_inherited(koji_proxy, event, module_builds, top_tag)

-        six.assertCountEqual(
-            self,
-            result,
-            [{"name": "foo", "version": "1", "release": "3", "tag_name": "middle-tag"}],
-        )
+        six.assertCountEqual(self, result, [m3])
+        self.assertEqual(
+            koji_proxy.mock_calls,
+            [mock.call.getFullInheritance("top-tag", event=123456)],
+        )
+
+    def test_build_in_multiple_tags(self):
+        event = {"id": 123456}
+        koji_proxy = mock.Mock()
+        top_tag = "top-tag"
+
+        koji_proxy.getFullInheritance.return_value = [
+            {"name": "middle-tag"},
+            {"name": "bottom-tag"},
+        ]
+        m = _mk_module_build("1", "middle-tag")
+        module_builds = [m, m]
+
+        result = source_koji.filter_inherited(koji_proxy, event, module_builds, top_tag)
+
+        six.assertCountEqual(self, result, [m])
         self.assertEqual(
             koji_proxy.mock_calls,
             [mock.call.getFullInheritance("top-tag", event=123456)],
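Taken together, the three tests describe filter_inherited as: among the candidate module builds, keep only those tagged into the tag closest to the top tag in the inheritance chain, with duplicates dropped. A sketch of that rule under these assumptions (not the actual pungi implementation):

    def filter_inherited_sketch(builds, top_tag, inheritance):
        # "inheritance" is the list returned by getFullInheritance(top_tag).
        order = [top_tag] + [tag["name"] for tag in inheritance]
        best_tag = min({b["tag_name"] for b in builds}, key=order.index)
        result, seen = [], set()
        for build in builds:
            if build["tag_name"] == best_tag and build["nvr"] not in seen:
                seen.add(build["nvr"])
                result.append(build)
        return result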
@@ -679,6 +720,7 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
     def setUp(self):
         super(TestAddModuleToVariant, self).setUp()
         self.koji = mock.Mock()
+        self.compose = helpers.DummyCompose(self.topdir, {})
         self.koji.koji_module.pathinfo.typedir.return_value = MMDS_DIR
         files = ["modulemd.x86_64.txt", "modulemd.armv7hl.txt", "modulemd.txt"]
         self.koji.koji_proxy.listArchives.return_value = [
@@ -701,7 +743,9 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
     def test_adding_module(self):
         variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])

-        source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
+        source_koji._add_module_to_variant(
+            self.koji, variant, self.buildinfo, compose=self.compose
+        )

         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
         self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")
@@ -723,7 +767,9 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
             modules=[{"name": "m1:latest-20190101:cafe", "glob": False}],
         )

-        source_koji._add_module_to_variant(self.koji, variant, self.buildinfo)
+        source_koji._add_module_to_variant(
+            self.koji, variant, self.buildinfo, compose=self.compose
+        )

         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
         self.assertEqual(mod1.get_NSVCA(), "module:master:20190318:abcdef:armhfp")
@@ -740,7 +786,11 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
         variant = mock.Mock(arches=["armhfp", "x86_64"], arch_mmds={}, modules=[])

         source_koji._add_module_to_variant(
-            self.koji, variant, self.buildinfo, add_to_variant_modules=True
+            self.koji,
+            variant,
+            self.buildinfo,
+            compose=self.compose,
+            add_to_variant_modules=True,
         )

         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
@@ -766,7 +816,11 @@ class TestAddModuleToVariant(helpers.PungiTestCase):
         )

         source_koji._add_module_to_variant(
-            self.koji, variant, self.buildinfo, add_to_variant_modules=True
+            self.koji,
+            variant,
+            self.buildinfo,
+            compose=self.compose,
+            add_to_variant_modules=True,
         )

         mod1 = variant.arch_mmds["armhfp"]["module:master:20190318:abcdef"]
@@ -13,8 +13,10 @@ import random
 import os
 import six

+from parameterized import parameterized
+
 from pungi.wrappers import scm
-from tests.helpers import touch
+from tests.helpers import touch, GIT_WITH_CREDS
 from kobo.shortcuts import run
@@ -109,37 +111,49 @@ class FileSCMTestCase(SCMBaseTest):
         self.assertIn("No directories matched", str(ctx.exception))


+CREDENTIALS_CONFIG = {"credential_helper": "!ch"}
+
+
 class GitSCMTestCase(SCMBaseTest):
-    def assertCalls(self, mock_run, url, branch, command=None):
+    def tearDown(self):
+        shutil.rmtree("/tmp/pungi-temp-git-repos-%s" % os.getpid())
+        super(GitSCMTestCase, self).tearDown()
+
+    def assertCalls(self, mock_run, url, branch, command=None, with_creds=False):
+        git = GIT_WITH_CREDS if with_creds else ["git"]
         command = [command] if command else []
         self.assertEqual(
             [call[0][0] for call in mock_run.call_args_list],
             [
                 ["git", "init"],
-                ["git", "fetch", "--depth=1", url, branch],
+                git + ["fetch", "--depth=1", url, branch],
                 ["git", "checkout", "FETCH_HEAD"],
             ]
             + command,
         )

-    @mock.patch("pungi.wrappers.scm.run")
-    def test_get_file(self, run):
+    @parameterized.expand([("without_creds", {}), ("with_creds", CREDENTIALS_CONFIG)])
+    def test_get_file(self, _name, config):
         def process(cmd, workdir=None, **kwargs):
             touch(os.path.join(workdir, "some_file.txt"))
             touch(os.path.join(workdir, "other_file.txt"))

-        run.side_effect = process
-
-        retval = scm.get_file_from_scm(
-            {
-                "scm": "git",
-                "repo": "git://example.com/git/repo.git",
-                "file": "some_file.txt",
-            },
-            self.destdir,
-        )
+        with mock.patch("pungi.wrappers.scm.run") as run:
+            run.side_effect = process
+            retval = scm.get_file_from_scm(
+                {
+                    "scm": "git",
+                    "repo": "git://example.com/git/repo.git",
+                    "file": "some_file.txt",
+                    "options": config,
+                },
+                self.destdir,
+            )
+
         self.assertStructure(retval, ["some_file.txt"])
-        self.assertCalls(run, "git://example.com/git/repo.git", "master")
+        self.assertCalls(
+            run, "git://example.com/git/repo.git", "master", with_creds=bool(config)
+        )

     @mock.patch("pungi.wrappers.scm.run")
     def test_get_file_function(self, run):
@@ -163,9 +177,10 @@ class GitSCMTestCase(SCMBaseTest):
         self.assertEqual(retval, destination)
         self.assertCalls(run, "git://example.com/git/repo.git", "master")

-    @mock.patch("pungi.wrappers.scm.run")
-    def test_get_file_fetch_fails(self, run):
+    @parameterized.expand([("without_creds", {}), ("with_creds", CREDENTIALS_CONFIG)])
+    def test_get_file_fetch_fails(self, _name, config):
         url = "git://example.com/git/repo.git"
+        git = GIT_WITH_CREDS if config else ["git"]

         def process(cmd, workdir=None, **kwargs):
             if "fetch" in cmd:
@@ -175,18 +190,20 @@ class GitSCMTestCase(SCMBaseTest):
             touch(os.path.join(workdir, "some_file.txt"))
             touch(os.path.join(workdir, "other_file.txt"))

-        run.side_effect = process
-
-        retval = scm.get_file_from_scm(
-            {"scm": "git", "repo": url, "file": "some_file.txt"}, self.destdir
-        )
+        with mock.patch("pungi.wrappers.scm.run") as run:
+            run.side_effect = process
+            retval = scm.get_file_from_scm(
+                {"scm": "git", "repo": url, "file": "some_file.txt", "options": config},
+                self.destdir,
+            )
+
         self.assertStructure(retval, ["some_file.txt"])
         self.assertEqual(
             [call[0][0] for call in run.call_args_list],
             [
                 ["git", "init"],
-                [
-                    "git",
+                git
+                + [
                     "fetch",
                     "--depth=1",
                     "git://example.com/git/repo.git",
@@ -194,7 +211,7 @@ class GitSCMTestCase(SCMBaseTest):
                 ],
                 ["git", "init"],
                 ["git", "remote", "add", "origin", url],
-                ["git", "remote", "update", "origin"],
+                git + ["remote", "update", "origin"],
                 ["git", "checkout", "master"],
             ],
         )
@@ -243,20 +260,28 @@ class GitSCMTestCase(SCMBaseTest):

         self.assertEqual(str(ctx.exception), "'make' failed with exit code 1")

-    @mock.patch("pungi.wrappers.scm.run")
-    def test_get_dir(self, run):
+    @parameterized.expand([("without_creds", {}), ("with_creds", CREDENTIALS_CONFIG)])
+    def test_get_dir(self, _name, config):
         def process(cmd, workdir=None, **kwargs):
             touch(os.path.join(workdir, "subdir", "first"))
             touch(os.path.join(workdir, "subdir", "second"))

-        run.side_effect = process
-
-        retval = scm.get_dir_from_scm(
-            {"scm": "git", "repo": "git://example.com/git/repo.git", "dir": "subdir"},
-            self.destdir,
-        )
+        with mock.patch("pungi.wrappers.scm.run") as run:
+            run.side_effect = process
+            retval = scm.get_dir_from_scm(
+                {
+                    "scm": "git",
+                    "repo": "git://example.com/git/repo.git",
+                    "dir": "subdir",
+                    "options": config,
+                },
+                self.destdir,
+            )
+
         self.assertStructure(retval, ["first", "second"])
-        self.assertCalls(run, "git://example.com/git/repo.git", "master")
+        self.assertCalls(
+            run, "git://example.com/git/repo.git", "master", with_creds=bool(config)
+        )

     @mock.patch("pungi.wrappers.scm.run")
     def test_get_dir_and_generate(self, run):
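A pattern worth noting in these git tests: only the subcommands that touch the network (fetch, remote update, and ls-remote in the util tests below) get the credential-aware prefix, while purely local steps (init, checkout) keep the bare ["git"]. GIT_WITH_CREDS itself lives in tests.helpers and is not shown in this diff; given the {"credential_helper": "!ch"} option above, it plausibly injects the helper through git's -c option, along the lines of:

    # Assumption: the exact flags are defined in tests.helpers, not here.
    GIT_WITH_CREDS = ["git", "-c", "credential.helper=!ch"]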
|
@ -16,7 +16,7 @@ import six
|
|||||||
from pungi import compose
|
from pungi import compose
|
||||||
from pungi import util
|
from pungi import util
|
||||||
|
|
||||||
from tests.helpers import touch, PungiTestCase, mk_boom
|
from tests.helpers import touch, PungiTestCase, mk_boom, GIT_WITH_CREDS
|
||||||
|
|
||||||
|
|
||||||
class TestGitRefResolver(unittest.TestCase):
|
class TestGitRefResolver(unittest.TestCase):
|
||||||
@ -32,6 +32,20 @@ class TestGitRefResolver(unittest.TestCase):
|
|||||||
universal_newlines=True,
|
universal_newlines=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@mock.patch("pungi.util.run")
|
||||||
|
def test_successful_resolve_with_credentials(self, run):
|
||||||
|
run.return_value = (0, "CAFEBABE\tHEAD\n")
|
||||||
|
|
||||||
|
url = util.resolve_git_url(
|
||||||
|
"https://git.example.com/repo.git?somedir#HEAD", "!ch"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(url, "https://git.example.com/repo.git?somedir#CAFEBABE")
|
||||||
|
run.assert_called_once_with(
|
||||||
|
GIT_WITH_CREDS + ["ls-remote", "https://git.example.com/repo.git", "HEAD"],
|
||||||
|
universal_newlines=True,
|
||||||
|
)
|
||||||
|
|
||||||
@mock.patch("pungi.util.run")
|
@mock.patch("pungi.util.run")
|
||||||
def test_successful_resolve_branch(self, run):
|
def test_successful_resolve_branch(self, run):
|
||||||
run.return_value = (0, "CAFEBABE\trefs/heads/f24\n")
|
run.return_value = (0, "CAFEBABE\trefs/heads/f24\n")
|
||||||
@ -211,11 +225,12 @@ class TestGitRefResolver(unittest.TestCase):
|
|||||||
self.assertEqual(resolver(url2), "2")
|
self.assertEqual(resolver(url2), "2")
|
||||||
self.assertEqual(resolver(url3, ref2), "beef")
|
self.assertEqual(resolver(url3, ref2), "beef")
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
mock_resolve_url.call_args_list, [mock.call(url1), mock.call(url2)]
|
mock_resolve_url.call_args_list,
|
||||||
|
[mock.call(url1, None), mock.call(url2, None)],
|
||||||
)
|
)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
mock_resolve_ref.call_args_list,
|
mock_resolve_ref.call_args_list,
|
||||||
[mock.call(url3, ref1), mock.call(url3, ref2)],
|
[mock.call(url3, ref1, None), mock.call(url3, ref2, None)],
|
||||||
)
|
)
|
||||||
|
|
||||||
@mock.patch("pungi.util.resolve_git_url")
|
@mock.patch("pungi.util.resolve_git_url")
|
||||||
@ -227,7 +242,7 @@ class TestGitRefResolver(unittest.TestCase):
|
|||||||
resolver(url)
|
resolver(url)
|
||||||
with self.assertRaises(util.GitUrlResolveError):
|
with self.assertRaises(util.GitUrlResolveError):
|
||||||
resolver(url)
|
resolver(url)
|
||||||
self.assertEqual(mock_resolve.call_args_list, [mock.call(url)])
|
self.assertEqual(mock_resolve.call_args_list, [mock.call(url, None)])
|
||||||
|
|
||||||
|
|
||||||
class TestGetVariantData(unittest.TestCase):
|
class TestGetVariantData(unittest.TestCase):
|
||||||