Compare commits
198 Commits
pungi-4.3 ... master
Author | SHA1 | Date |
---|---|---|
Stepan Oksanichenko | bc8c776872 | |
Stepan Oksanichenko | 91d282708e | |
Stepan Oksanichenko | ccaf31bc87 | |
Stepan Oksanichenko | 5fe0504265 | |
Stepan Oksanichenko | d79f163685 | |
Stepan Oksanichenko | 793fb23958 | |
Stepan Oksanichenko | 65d0c09e97 | |
Stepan Oksanichenko | 0a9e5df66c | |
Stepan Oksanichenko | ae527a2e01 | |
Aditya Bisoi | 4991144a01 | |
Lubomír Sedlář | 68d94ff488 | |
Ozan Unsal | ce45fdc39a | |
Lubomír Sedlář | b625ccea06 | |
Lubomír Sedlář | 8eccfc5a03 | |
Lubomír Sedlář | f5a0e06af5 | |
Lubomír Sedlář | f6f54b56ca | |
Aditya Bisoi | fcee346c7c | |
Lubomír Sedlář | 82ec38ad60 | |
Lubomír Sedlář | c9cbd80569 | |
Aditya Bisoi | 035fca1e6d | |
Lubomír Sedlář | 0f8cae69b7 | |
Lubomír Sedlář | f17628dd5f | |
Lubomír Sedlář | f3485410ad | |
Haibo Lin | cccfaea14e | |
Lubomír Sedlář | e2057b75c5 | |
Lubomír Sedlář | 44ea4d4419 | |
Lubomír Sedlář | d4425f7935 | |
Lubomír Sedlář | c8118527ea | |
Lubomír Sedlář | a8ea322907 | |
Lubomír Sedlář | c4995c8f4b | |
Lubomír Sedlář | 997e372f25 | |
Lubomír Sedlář | 42f1c62528 | |
Lubomír Sedlář | 3fd29d0ee0 | |
Lubomír Sedlář | c1f2fa5035 | |
Aurélien Bompard | 85c9e9e776 | |
Lubomír Sedlář | 33012ab31e | |
Lubomír Sedlář | 72ddf65e62 | |
Haibo Lin | c402ff3d60 | |
Haibo Lin | 8dd344f9ee | |
Lubomír Sedlář | d07f517a90 | |
Lubomír Sedlář | 48366177cc | |
Lubomír Sedlář | 4cb8671fe4 | |
Lubomír Sedlář | 135bbbfe7e | |
Lubomír Sedlář | 5624829564 | |
Haibo Lin | 5fb4f86312 | |
Lubomír Sedlář | e891fe7b09 | |
Haibo Lin | 4cd7d39914 | |
Lubomír Sedlář | 5de829d05b | |
Lubomír Sedlář | 2930a1cc54 | |
Lubomír Sedlář | 9c4d3d496d | |
Haibo Lin | 4637fd6697 | |
Lubomír Sedlář | 2ff8132eaf | |
Lubomír Sedlář | f9190d1fd1 | |
Lubomír Sedlář | 80ad0448ec | |
Lubomír Sedlář | 027380f969 | |
Lubomír Sedlář | 41048f60b7 | |
Ondrej Nosek | 9f8f6a7956 | |
Lubomír Sedlář | 3d3e4bafdf | |
Lubomír Sedlář | 8fe0257e93 | |
Fedora Release Engineering | d7b5fd2278 | |
Lubomír Sedlář | 8b49d4ad61 | |
Lubomír Sedlář | 57443cd0aa | |
Python Maint | 1d146bb8d5 | |
Lubomír Sedlář | 790091b7d7 | |
Lubomír Sedlář | 28aad3ea40 | |
Pierre-Yves Chibon | 7373b4dbbf | |
Lubomír Sedlář | 218b11f1b7 | |
Haibo Lin | bfbe9095d2 | |
Lubomír Sedlář | eb17182c04 | |
Stepan Oksanichenko | f91f90cf64 | |
Stepan Oksanichenko | 49931082b2 | |
Stepan Oksanichenko | 8ba8609bda | |
Stepan Oksanichenko | 6f495a8133 | |
Stepan Oksanichenko | 2b4bddbfe0 | |
Stepan Oksanichenko | 032cf725de | |
Stepan Oksanichenko | 8b11bb81af | |
soksanichenko | 114a73f100 | |
soksanichenko | 1c3e5dce5e | |
soksanichenko | e55abb17f1 | |
soksanichenko | e81d78a1d1 | |
soksanichenko | 68915d04f8 | |
soksanichenko | a25bf72fb8 | |
Stepan Oksanichenko | 68aee1fa2d | |
soksanichenko | 6592735aec | |
soksanichenko | 943fd8e77d | |
soksanichenko | 004fc4382f | |
soksanichenko | 596c5c0b7f | |
soksanichenko | 141d00e941 | |
soksanichenko | 4b64d20826 | |
soksanichenko | 0747e967b0 | |
soksanichenko | 6d58bc2ed8 | |
Stepan Oksanichenko | 60a347a4a2 | |
soksanichenko | 53ed7386f3 | |
soksanichenko | ed43f0038e | |
soksanichenko | fcc9b4f1ca | |
soksanichenko | d32c293bca | |
soksanichenko | f0bd1af999 | |
soksanichenko | 1b4747b915 | |
Lubomír Sedlář | 6aabfc9285 | |
Tomáš Hozza | 9e014fed6a | |
Tomáš Hozza | 7ccb1d4849 | |
Tomáš Hozza | abec28256d | |
Lubomír Sedlář | 46216b4f17 | |
Lubomír Sedlář | 02b3adbaeb | |
Lubomír Sedlář | d17e578645 | |
Lubomír Sedlář | 6c1c9d9efd | |
Stepan Oksanichenko | 8dd7d8326f | |
soksanichenko | d7b173cae5 | |
soksanichenko | fa4640f03e | |
Stepan Oksanichenko | d66eb0dea8 | |
soksanichenko | d56227ab4a | |
soksanichenko | 12433157dd | |
soksanichenko | 623955cb1f | |
soksanichenko | 4e0d2d14c9 | |
soksanichenko | b61e59d676 | |
soksanichenko | eb35d7baac | |
soksanichenko | 54209f3643 | |
soksanichenko | 80c4536eaa | |
soksanichenko | 9bb5550d36 | |
soksanichenko | 364ed6c3af | |
soksanichenko | 0b965096ee | |
soksanichenko | d914626d92 | |
soksanichenko | 32215d955a | |
soksanichenko | d711f8a2d6 | |
soksanichenko | bd9d800b52 | |
soksanichenko | e03648589d | |
soksanichenko | b5fe2e8129 | |
soksanichenko | b14e85324c | |
soksanichenko | 5a19ad2258 | |
soksanichenko | 9ae49dae5b | |
soksanichenko | c82cbfdc32 | |
soksanichenko | ee9c9a74e6 | |
soksanichenko | ea0f933315 | |
soksanichenko | 323d31df2b | |
soksanichenko | 9acd7f5fa4 | |
soksanichenko | a2b16eb44f | |
soksanichenko | ff946d3f7b | |
soksanichenko | ede91bcd03 | |
soksanichenko | 0fa459eb9e | |
soksanichenko | b49ffee06d | |
soksanichenko | fce5493f09 | |
soksanichenko | 750499eda1 | |
soksanichenko | d999960235 | |
soksanichenko | 6edece449d | |
Stepan Oksanichenko | dd22d94a9e | |
soksanichenko | b157a1825a | |
soksanichenko | fd298d4f17 | |
soksanichenko | f21ed6f607 | |
Stepan Oksanichenko | cfe6ec3f4e | |
soksanichenko | e6c6f74176 | |
soksanichenko | 8676941655 | |
soksanichenko | 5f74175c33 | |
soksanichenko | 1e18e8995d | |
soksanichenko | 38ea822260 | |
soksanichenko | 34eb45c7ec | |
soksanichenko | 7422d1e045 | |
soksanichenko | 97801e772e | |
soksanichenko | dff346eedb | |
soksanichenko | de53dd0bbd | |
soksanichenko | 88121619bc | |
soksanichenko | 0484426e0c | |
soksanichenko | b9d86b90e1 | |
soksanichenko | 58a16e5688 | |
soksanichenko | f2ed64d952 | |
stepan_oksanichenko | b2c49dcaf6 | |
stepan_oksanichenko | 14dd6a195f | |
stepan_oksanichenko | 084321dd97 | |
stepan_oksanichenko | 941d6b064a | |
stepan_oksanichenko | aaeee7132d | |
stepan_oksanichenko | cc4d99441c | |
stepan_oksanichenko | a435eeed06 | |
stepan_oksanichenko | b9f554bf39 | |
stepan_oksanichenko | ebf028ca3b | |
stepan_oksanichenko | 305103a38e | |
stepan_oksanichenko | 01bce26275 | |
soksanichenko | 4d763514c1 | |
Danylo Kuropiatnyk | 41381df6a5 | |
soksanichenko | 02686d7bdf | |
soksanichenko | 2e48c9a56f | |
soksanichenko | b3a8c3f28a | |
soksanichenko | 5434d24027 | |
soksanichenko | 3b5501b4bf | |
soksanichenko | cea8d92906 | |
soksanichenko | 1a29de435e | |
soksanichenko | 69ed7699e8 | |
Stepan Oksanichenko | 103c3dc608 | |
Stepan Oksanichenko | 94ad7603b8 | |
oshyshatskyi | 903db91c0f | |
oshyshatskyi | 552343fffe | |
oshyshatskyi | 5806217041 | |
Andrew Lukoshko | 67eacf8483 | |
Ken Dreyer | 38789d07ee | |
Lubomír Sedlář | 3735aaa443 | |
Haibo Lin | 2c1603c414 | |
Haibo Lin | f2fd10b0ab | |
Sergey Fokin | ac601ab8ea | |
oshyshatskyi | 757a6ed653 | |
Oleksandr Shyshatskyi | b2e439e561 |
MANIFEST.in

@@ -2,6 +2,7 @@ include AUTHORS
 include COPYING
 include GPL
 include pungi.spec
+include setup.cfg
 include tox.ini
 include share/*
 include share/multilib/*
@@ -0,0 +1,2 @@
+# Clean up pungi cache
+d /var/cache/pungi/createrepo_c/ - - - 30d
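The new drop-in above uses systemd's tmpfiles.d(5) syntax (the file name is not shown in this view): ``d`` creates ``/var/cache/pungi/createrepo_c/`` if it is missing, the three dashes keep the default mode, owner and group, and the ``30d`` age field lets ``systemd-tmpfiles --clean`` prune cache entries that have not been touched for 30 days.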
142  doc/conf.py

@@ -18,12 +18,12 @@ import os
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
+# sys.path.insert(0, os.path.abspath('.'))

 # -- General configuration ------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'

 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -31,207 +31,201 @@ import os
 extensions = []

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

 # The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"

 # The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'

 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"

 # General information about the project.
-project = u'Pungi'
-copyright = u'2016, Red Hat, Inc.'
+project = "Pungi"
+copyright = "2016, Red Hat, Inc."

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = '4.3'
+version = "4.5"
 # The full version, including alpha/beta/rc tags.
-release = '4.3.6'
+release = "4.5.0"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-#language = None
+# language = None

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-#today = ''
+# today = ''
 # Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = ["_build"]

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
-#default_role = None
+# default_role = None

 # If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+# show_authors = False

 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"

 # A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []

 # If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
+# keep_warnings = False


 # -- Options for HTML output ----------------------------------------------

 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'default'
+html_theme = "default"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-#html_theme_options = {}
+# html_theme_options = {}

 # Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-#html_title = None
+# html_title = None

 # A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-#html_logo = None
+# html_logo = None

 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-#html_favicon = None
+# html_favicon = None

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]

 # Add any extra paths that contain custom files (such as robots.txt or
 # .htaccess) here, relative to this directory. These files are copied
 # directly to the root of the documentation.
-#html_extra_path = []
+# html_extra_path = []

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True

 # Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-#html_additional_pages = {}
+# html_additional_pages = {}

 # If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True

 # If false, no index is generated.
-#html_use_index = True
+# html_use_index = True

 # If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False

 # If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None

 # Output file base name for HTML help builder.
-htmlhelp_basename = 'Pungidoc'
+htmlhelp_basename = "Pungidoc"


 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    ('index', 'Pungi.tex', u'Pungi Documentation',
-     u'Daniel Mach', 'manual'),
+    ("index", "Pungi.tex", "Pungi Documentation", "Daniel Mach", "manual"),
 ]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-#latex_logo = None
+# latex_logo = None

 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-#latex_use_parts = False
+# latex_use_parts = False

 # If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False

 # If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False

 # Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []

 # If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True


 # -- Options for manual page output ---------------------------------------

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'pungi', u'Pungi Documentation',
-     [u'Daniel Mach'], 1)
-]
+man_pages = [("index", "pungi", "Pungi Documentation", ["Daniel Mach"], 1)]

 # If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False


 # -- Options for Texinfo output -------------------------------------------

@@ -240,19 +234,25 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    ('index', 'Pungi', u'Pungi Documentation',
-     u'Daniel Mach', 'Pungi', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        "index",
+        "Pungi",
+        "Pungi Documentation",
+        "Daniel Mach",
+        "Pungi",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]

 # Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []

 # If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'

 # If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
+# texinfo_no_detailmenu = False
doc/configuration.rst

@@ -194,6 +194,17 @@ Options
     Tracking Service Kerberos authentication. If not defined, the default
     Kerberos principal is used.

+**cts_oidc_token_url**
+    (*str*) -- URL to the OIDC token endpoint.
+    For example ``https://oidc.example.com/openid-connect/token``.
+    This option can be overridden by the environment variable ``CTS_OIDC_TOKEN_URL``.
+
+**cts_oidc_client_id**
+    (*str*) -- OIDC client ID.
+    This option can be overridden by the environment variable ``CTS_OIDC_CLIENT_ID``.
+    Note that the environment variable ``CTS_OIDC_CLIENT_SECRET`` must be configured
+    with the corresponding client secret to authenticate to CTS via OIDC.
+
 **compose_type**
     (*str*) -- Allows to set default compose type. Type set via a command-line
     option overwrites this.
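The token exchange behind these options is a standard OIDC client-credentials flow. A minimal sketch of it, mirroring what the new ``cts_auth`` helper in ``pungi/compose.py`` does further down this page (the endpoint URL and client ID are placeholders)::

    import os
    import requests

    # Exchange client credentials for a bearer token.
    resp = requests.post(
        "https://oidc.example.com/openid-connect/token",  # cts_oidc_token_url
        data={
            "grant_type": "client_credentials",
            "client_id": "pungi",  # cts_oidc_client_id (placeholder)
            "client_secret": os.environ["CTS_OIDC_CLIENT_SECRET"],
        },
    )
    resp.raise_for_status()
    # The access token is then sent to CTS as "Authorization: Bearer <token>".
    token = resp.json()["access_token"]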
@@ -581,6 +592,16 @@ Options
     with everything. Set this option to ``False`` to ignore ``noarch`` in
     ``ExclusiveArch`` and always consider only binary architectures.

+**pkgset_inherit_exclusive_arch_to_noarch** = True
+    (*bool*) -- When set to ``True``, the value of ``ExclusiveArch`` or
+    ``ExcludeArch`` will be copied from source rpm to all its noarch packages.
+    That will then limit which architectures the noarch packages can be
+    included in.
+
+    By setting this option to ``False`` this step is skipped, and noarch
+    packages will by default land in all architectures. They can still be
+    excluded by listing them in a relevant section of ``filter_packages``.
+
 **pkgset_allow_reuse** = True
     (*bool*) -- When set to ``True``, *Pungi* will try to reuse pkgset data
     from the old composes specified by ``--old-composes``. When enabled, this
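A sketch of how the two behaviours combine in a config file, assuming hypothetical variant and package names::

    # Skip ExclusiveArch/ExcludeArch inheritance for noarch packages...
    pkgset_inherit_exclusive_arch_to_noarch = False
    # ...and instead filter one noarch package out of a specific variant/arch.
    filter_packages = [
        ("^Server$", {"s390x": ["some-noarch-tool"]}),
    ]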
@@ -920,6 +941,10 @@ Options
     comps file can not be found in the package set. When disabled (the
     default), such cases are still reported as warnings in the log.

+    With ``dnf`` gather backend, this option will abort the compose on any
+    missing package no matter if it's listed in comps, ``additional_packages``
+    or prepopulate file.
+
 **gather_source_mapping**
     (*str*) -- JSON mapping with initial packages for the compose. The value
     should be a path to JSON file with following mapping: ``{variant: {arch:
@@ -1607,8 +1632,23 @@ OSBuild Composer for building images
     * ``release`` -- release part of the final NVR. If neither this option nor
       the global ``osbuild_release`` is set, Koji will automatically generate a
       value.
-    * ``repo`` -- a list of repository URLs from which to consume packages for
+    * ``repo`` -- a list of repositories from which to consume packages for
       building the image. By default only the variant repository is used.
+      The list items may use one of the following formats:
+
+      * String with just the repository URL.
+
+      * Dictionary with the following keys:
+
+        * ``baseurl`` -- URL of the repository.
+        * ``package_sets`` -- a list of package set names to use for this
+          repository. Package sets are an internal concept of Image Builder
+          and are used in image definitions. If specified, the repository is
+          used by Image Builder only for the pipeline with the same name.
+          For example, specifying the ``build`` package set name will make
+          the repository be used only for the build environment in which
+          the image will be built. (optional)
+
     * ``arches`` -- list of architectures for which to build the image. By
       default, the variant arches are used. This option can only restrict it,
       not add a new one.
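An illustrative ``repo`` value showing both item formats side by side (the URLs are placeholders); the dictionary entry is scoped to Image Builder's ``build`` package set as described above::

    "repo": [
        # Plain URL: available to every pipeline.
        "https://example.com/extra-packages/$arch/",
        # Dictionary: restricted to the "build" package set.
        {
            "baseurl": "https://example.com/build-tools/$arch/",
            "package_sets": ["build"],
        },
    ],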
@@ -1641,13 +1681,13 @@ OSBuild Composer for building images
       * ``tenant_id`` -- Azure tenant ID to upload the image to
       * ``subscription_id`` -- Azure subscription ID to upload the image to
       * ``resource_group`` -- Azure resource group to upload the image to
-      * ``location`` -- Azure location to upload the image to
+      * ``location`` -- Azure location of the resource group (optional)
       * ``image_name`` -- Image name of the uploaded Azure image (optional)

     * **GCP upload options** -- upload to Google Cloud Platform.

       * ``region`` -- GCP region to upload the image to
-      * ``bucket`` -- GCP bucket to upload the image to
+      * ``bucket`` -- GCP bucket to upload the image to (optional)
       * ``share_with_accounts`` -- list of GCP accounts to share the image
         with
       * ``image_name`` -- Image name of the uploaded GCP image (optional)
@@ -1764,6 +1804,8 @@ repository with a new commit.
 * ``tag_ref`` -- (*bool*, default ``True``) If set to ``False``, a git
   reference will not be created.
 * ``ostree_ref`` -- (*str*) To override value ``ref`` from ``treefile``.
+* ``runroot_packages`` -- (*list*) A list of additional package names to be
+  installed in the runroot environment in Koji.

 Example config
 --------------
doc/index.rst

@@ -19,7 +19,7 @@ Contents:
    scm_support
    messaging
    gathering
+   koji
    comps
    contributing
    testing
-   multi_compose
doc/koji.rst (new file)

@@ -0,0 +1,105 @@
+======================
+Getting data from koji
+======================
+
+When Pungi is configured to get packages from a Koji tag, it somehow needs to
+access the actual RPM files.
+
+Historically, this required the storage used by Koji to be directly available
+on the host where Pungi was running. This was usually achieved by using NFS for
+the Koji volume, and mounting it on the compose host.
+
+The compose could be created directly on the same volume. In such case the
+packages would be hardlinked, significantly reducing space consumption.
+
+The compose could also be created on a different storage, in which case the
+packages would either need to be copied over or symlinked. Using symlinks
+requires that anything that accesses the compose (e.g. a download server) would
+also need to mount the Koji volume in the same location.
+
+There is also a risk with symlinks that the package in Koji can change (due to
+being resigned for example), which would invalidate composes linking to it.
+
+Using Koji without direct mount
+===============================
+
+It is possible now to run a compose from a Koji tag without direct access to
+Koji storage.
+
+Pungi can download the packages over HTTP protocol, store them in a local
+cache, and consume them from there.
+
+The local cache has similar structure to what is on the Koji volume.
+
+When Pungi needs some package, it has a path on Koji volume. It will replace
+the ``topdir`` with the cache location. If such file exists, it will be used.
+If it doesn't exist, it will be downloaded from Koji (by replacing the
+``topdir`` with ``topurl``).
+
+::
+
+    Koji path    /mnt/koji/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
+    Koji URL     https://kojipkgs.fedoraproject.org/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
+    Local path   /mnt/compose/cache/packages/foo/1/1.fc38/data/signed/abcdef/noarch/foo-1-1.fc38.noarch.rpm
+
+The packages can be hardlinked from this cache directory.
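The substitution the new document describes is a plain prefix swap. A minimal sketch, assuming the ``topdir``, ``topurl`` and cache values from the example above, with ``download`` standing in for the actual HTTP fetch::

    import os

    topdir = "/mnt/koji"                           # path on the Koji volume
    topurl = "https://kojipkgs.fedoraproject.org"  # HTTP download base
    cache = "/mnt/compose/cache"                   # local cache root

    def resolve(koji_path):
        """Return a usable local path for an RPM known by its Koji path."""
        relative = os.path.relpath(koji_path, topdir)
        local = os.path.join(cache, relative)
        if not os.path.exists(local):
            # Not cached yet: fetch it by replacing topdir with topurl.
            url = "%s/%s" % (topurl, relative)
            download(url, local)  # placeholder for the real download helper
        return local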
+
+Cleanup
+-------
+
+While the approach above allows each RPM to be downloaded only once, it will
+eventually result in the Koji volume being mirrored locally. Most of the
+packages will however no longer be needed.
+
+There is a script ``pungi-cache-cleanup`` that can help with that. It can find
+and remove files from the cache that are no longer needed.
+
+A file is no longer needed if it has a single link (meaning it is only in the
+cache, not in any compose), and it has mtime older than a given threshold.
+
+It doesn't make sense to delete files that are hardlinked in an existing
+compose as it would not save any space anyway.
+
+The mtime check is meant to preserve files that are downloaded but not actually
+used in a compose, like a subpackage that is not included in any variant. Every
+time its existence in the local cache is checked, the mtime is updated.
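The eligibility test boils down to two ``stat`` fields. A sketch of the per-file decision ``pungi-cache-cleanup`` has to make (the threshold value is illustrative)::

    import os
    import time

    THRESHOLD = 30 * 24 * 3600  # e.g. 30 days, matching the tmpfiles.d drop-in

    def is_unneeded(path):
        st = os.stat(path)
        only_in_cache = st.st_nlink == 1           # no compose hardlinks it
        stale = time.time() - st.st_mtime > THRESHOLD
        return only_in_cache and stale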
+
+Race conditions?
+----------------
+
+It should be safe to have multiple compose hosts share the same storage volume
+for generated composes and local cache.
+
+If a cache file is accessed and it exists, there's no risk of race condition.
+
+If two composes need the same file at the same time and it is not present yet,
+one of them will take a lock on it and start downloading. The other will wait
+until the download is finished.
+
+The lock is only valid for a set amount of time (5 minutes) to avoid issues
+where the downloading process is killed in a way that blocks it from releasing
+the lock.
+
+If the file is large and network slow, the limit may not be enough to finish
+downloading. In that case the second process will steal the lock while the
+first process is still downloading. This will result in the same file being
+downloaded twice.
+
+When the first process finishes the download, it will put the file into the
+local cache location. When the second process finishes, it will atomically
+replace it, but since the content is identical nothing effectively changes.
+
+If the first compose already managed to hardlink the file before it gets
+replaced, there will be two copies of the file present locally.
+
+Integrity checking
+------------------
+
+There is minimal integrity checking. RPM packages belonging to real builds will
+be checked to match the checksum provided by Koji hub.
+
+There is no checking for scratch builds or any images.
doc/multi_compose.rst (deleted)

@@ -1,107 +0,0 @@
-.. _multi_compose:
-
-Managing compose from multiple parts
-====================================
-
-There may be cases where it makes sense to split a big compose into separate
-parts, but create a compose output that links all output into one familiar
-structure.
-
-The `pungi-orchestrate` tools allows that.
-
-It works with an INI-style configuration file. The ``[general]`` section
-contains information about identity of the main compose. Other sections define
-individual parts.
-
-The parts are scheduled to run in parallel, with the minimal amount of
-serialization. The final compose directory will contain hard-links to the
-files.
-
-
-General settings
-----------------
-
-**target**
-    Path to directory where the final compose should be created.
-**compose_type**
-    Type of compose to make.
-**release_name**
-    Name of the product for the final compose.
-**release_short**
-    Short name of the product for the final compose.
-**release_version**
-    Version of the product for the final compose.
-**release_type**
-    Type of the product for the final compose.
-**extra_args**
-    Additional arguments that will be passed to the child Pungi processes.
-**koji_profile**
-    If specified, a current event will be retrieved from the Koji instance and
-    used for all parts.
-
-**kerberos**
-    If set to yes, a kerberos ticket will be automatically created at the start.
-    Set keytab and principal as well.
-**kerberos_keytab**
-    Path to keytab file used to create the kerberos ticket.
-**kerberos_principal**
-    Kerberos principal for the ticket
-
-**pre_compose_script**
-    Commands to execute before first part is started. Can contain multiple
-    commands on separate lines.
-**post_compose_script**
-    Commands to execute after the last part finishes and final status is
-    updated. Can contain multiple commands on separate lines. ::
-
-        post_compose_script =
-            compose-latest-symlink $COMPOSE_PATH
-            custom-post-compose-script.sh
-
-    Multiple environment variables are defined for the scripts:
-
-    * ``COMPOSE_PATH``
-    * ``COMPOSE_ID``
-    * ``COMPOSE_DATE``
-    * ``COMPOSE_TYPE``
-    * ``COMPOSE_RESPIN``
-    * ``COMPOSE_LABEL``
-    * ``RELEASE_ID``
-    * ``RELEASE_NAME``
-    * ``RELEASE_SHORT``
-    * ``RELEASE_VERSION``
-    * ``RELEASE_TYPE``
-    * ``RELEASE_IS_LAYERED`` – ``YES`` for layered products, empty otherwise
-    * ``BASE_PRODUCT_NAME`` – only set for layered products
-    * ``BASE_PRODUCT_SHORT`` – only set for layered products
-    * ``BASE_PRODUCT_VERSION`` – only set for layered products
-    * ``BASE_PRODUCT_TYPE`` – only set for layered products
-
-**notification_script**
-    Executable name (or path to a script) that will be used to send a message
-    once the compose is finished. In order for a valid URL to be included in the
-    message, at least one part must configure path translation that would apply
-    to location of main compose.
-
-    Only two messages will be sent, one for start and one for finish (either
-    successful or not).
-
-
-Partial compose settings
-------------------------
-
-Each part should have a separate section in the config file.
-
-It can specify these options:
-
-**config**
-    Path to configuration file that describes this part. If relative, it is
-    resolved relative to the file with parts configuration.
-**just_phase**, **skip_phase**
-    Customize which phases should run for this part.
-**depends_on**
-    A comma separated list of other parts that must be finished before this part
-    starts.
-**failable**
-    A boolean toggle to mark a part as failable. A failure in such part will
-    mark the final compose as incomplete, but still successful.
doc/scm_support.rst

@@ -41,6 +41,14 @@ which can contain following keys.
 * ``command`` -- defines a shell command to run after Git clone to generate the
   needed file (for example to run ``make``). Only supported in Git backend.

+* ``options`` -- a dictionary of additional configuration options. These are
+  specific to different backends.
+
+  Currently supported values for Git:
+
+  * ``credential_helper`` -- path to a credential helper used to supply
+    username/password for remotes that require authentication.
+
 Koji examples
 -------------
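An illustrative scm_dict using the new key; the repository URL and helper path are placeholders for any git-credential(1)-style executable::

    variants_file = {
        "scm": "git",
        "repo": "https://git.example.com/releng/config.git",
        "file": "variants.xml",
        "options": {"credential_helper": "/usr/local/bin/pungi-git-credentials"},
    }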
993  pungi.spec
(File diff suppressed because it is too large.)
pungi/checks.py

@@ -227,9 +227,19 @@ def validate(config, offline=False, schema=None):
     DefaultValidator = _extend_with_default_and_alias(
         jsonschema.Draft4Validator, offline=offline
     )
-    validator = DefaultValidator(
-        schema,
-    )
+    if hasattr(jsonschema.Draft4Validator, "TYPE_CHECKER"):
+        # jsonschema >= 3.0 has new interface for checking types
+        validator = DefaultValidator(schema)
+    else:
+        validator = DefaultValidator(
+            schema,
+            {
+                "array": (tuple, list),
+                "regex": six.string_types,
+                "url": six.string_types,
+            },
+        )
     errors = []
     warnings = []
     for error in validator.iter_errors(config):
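The branch point can be probed directly; a tiny demonstration of the interface difference this hunk accounts for::

    import jsonschema

    if hasattr(jsonschema.Draft4Validator, "TYPE_CHECKER"):
        # jsonschema >= 3: custom types go through TYPE_CHECKER.redefine_many()
        print("new-style type checker")
    else:
        # jsonschema < 3: a {"name": types} mapping is passed to the validator
        print("legacy types mapping")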
@@ -377,6 +387,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 instance[property]["branch"] = resolver(
                     instance[property]["repo"],
                     instance[property].get("branch") or "HEAD",
+                    instance[property].get("options"),
                 )

         for error in _hook_errors(properties, instance, schema):

@@ -444,15 +455,18 @@ def _extend_with_default_and_alias(validator_class, offline=False):
                 context=all_errors,
             )

-    def is_array(checker, instance):
-        return isinstance(instance, (tuple, list))
-
-    def is_string_type(checker, instance):
-        return isinstance(instance, six.string_types)
-
-    type_checker = validator_class.TYPE_CHECKER.redefine_many(
-        {"array": is_array, "regex": is_string_type, "url": is_string_type}
-    )
+    kwargs = {}
+    if hasattr(validator_class, "TYPE_CHECKER"):
+        # jsonschema >= 3
+        def is_array(checker, instance):
+            return isinstance(instance, (tuple, list))
+
+        def is_string_type(checker, instance):
+            return isinstance(instance, six.string_types)
+
+        kwargs["type_checker"] = validator_class.TYPE_CHECKER.redefine_many(
+            {"array": is_array, "regex": is_string_type, "url": is_string_type}
+        )

     return jsonschema.validators.extend(
         validator_class,

@@ -464,7 +478,7 @@ def _extend_with_default_and_alias(validator_class, offline=False):
             "additionalProperties": _validate_additional_properties,
             "anyOf": _validate_any_of,
         },
-        type_checker=type_checker,
+        **kwargs
     )

@@ -507,6 +521,13 @@ def make_schema():
                 "file": {"type": "string"},
                 "dir": {"type": "string"},
                 "command": {"type": "string"},
+                "options": {
+                    "type": "object",
+                    "properties": {
+                        "credential_helper": {"type": "string"},
+                    },
+                    "additionalProperties": False,
+                },
             },
             "additionalProperties": False,
         },

@@ -588,6 +609,7 @@ def make_schema():
         "release_discinfo_description": {"type": "string"},
         "treeinfo_version": {"type": "string"},
         "compose_type": {"type": "string", "enum": COMPOSE_TYPES},
+        "label": {"type": "string"},
         "base_product_name": {"type": "string"},
         "base_product_short": {"type": "string"},
         "base_product_version": {"type": "string"},

@@ -665,7 +687,11 @@ def make_schema():
         "pkgset_allow_reuse": {"type": "boolean", "default": True},
         "createiso_allow_reuse": {"type": "boolean", "default": True},
         "extraiso_allow_reuse": {"type": "boolean", "default": True},
-        "pkgset_source": {"type": "string", "enum": ["koji", "repos"]},
+        "pkgset_source": {"type": "string", "enum": [
+            "koji",
+            "repos",
+            "kojimock",
+        ]},
         "createrepo_c": {"type": "boolean", "default": True},
         "createrepo_checksum": {
             "type": "string",

@@ -794,6 +820,14 @@ def make_schema():
             "type": "string",
             "enum": ["lorax", "buildinstall"],
         },
+        # In phase `buildinstall` we should add to compose only the
+        # images that will be used only as netinstall
+        "netinstall_variants": {
+            "$ref": "#/definitions/list_of_strings",
+            "default": [
+                "BaseOS",
+            ],
+        },
         "buildinstall_topdir": {"type": "string"},
         "buildinstall_kickstart": {"$ref": "#/definitions/str_or_scm_dict"},
         "buildinstall_use_guestmount": {"type": "boolean", "default": True},

@@ -811,8 +845,11 @@ def make_schema():
         "pdc_insecure": {"deprecated": "Koji is queried instead"},
         "cts_url": {"type": "string"},
         "cts_keytab": {"type": "string"},
+        "cts_oidc_token_url": {"type": "url"},
+        "cts_oidc_client_id": {"type": "string"},
         "koji_profile": {"type": "string"},
         "koji_event": {"type": "number"},
+        "koji_cache": {"type": "string"},
         "pkgset_koji_tag": {"$ref": "#/definitions/strings"},
         "pkgset_koji_builds": {"$ref": "#/definitions/strings"},
         "pkgset_koji_scratch_tasks": {"$ref": "#/definitions/strings"},

@@ -830,6 +867,10 @@ def make_schema():
             "type": "boolean",
             "default": True,
         },
+        "pkgset_inherit_exclusive_arch_to_noarch": {
+            "type": "boolean",
+            "default": True,
+        },
         "pkgset_scratch_modules": {
             "type": "object",
             "patternProperties": {

@@ -1032,6 +1073,9 @@ def make_schema():
                 "config_branch": {"type": "string"},
                 "tag_ref": {"type": "boolean"},
                 "ostree_ref": {"type": "string"},
+                "runroot_packages": {
+                    "$ref": "#/definitions/list_of_strings",
+                },
             },
             "required": [
                 "treefile",

@@ -1188,14 +1232,36 @@ def make_schema():
                     },
                     "arches": {"$ref": "#/definitions/list_of_strings"},
                     "release": {"type": "string"},
-                    "repo": {"$ref": "#/definitions/list_of_strings"},
+                    "repo": {
+                        "type": "array",
+                        "items": {
+                            "oneOf": [
+                                {
+                                    "type": "object",
+                                    "additionalProperties": False,
+                                    "required": ["baseurl"],
+                                    "properties": {
+                                        "baseurl": {"type": "string"},
+                                        "package_sets": {
+                                            "type": "array",
+                                            "items": {"type": "string"},
+                                        },
+                                    },
+                                },
+                                {"type": "string"},
+                            ]
+                        },
+                    },
                     "failable": {"$ref": "#/definitions/list_of_strings"},
                     "subvariant": {"type": "string"},
                     "ostree_url": {"type": "string"},
                     "ostree_ref": {"type": "string"},
                     "ostree_parent": {"type": "string"},
                     "upload_options": {
-                        "oneOf": [
+                        # this should be really 'oneOf', but the minimal
+                        # required properties in AWSEC2 and GCP options
+                        # overlap.
+                        "anyOf": [
                             # AWSEC2UploadOptions
                             {
                                 "type": "object",

@@ -1234,7 +1300,6 @@ def make_schema():
                                     "tenant_id",
                                     "subscription_id",
                                     "resource_group",
-                                    "location",
                                 ],
                                 "properties": {
                                     "tenant_id": {"type": "string"},

@@ -1250,7 +1315,7 @@ def make_schema():
                             {
                                 "type": "object",
                                 "additionalProperties": False,
-                                "required": ["region", "bucket"],
+                                "required": ["region"],
                                 "properties": {
                                     "region": {"type": "string"},
                                     "bucket": {"type": "string"},
pungi/compose.py (170 lines changed)

@@ -17,6 +17,7 @@
 __all__ = ("Compose",)
 
 
+import contextlib
 import errno
 import logging
 import os

@@ -38,6 +39,7 @@ from dogpile.cache import make_region
 from pungi.graph import SimpleAcyclicOrientedGraph
 from pungi.wrappers.variants import VariantsXmlParser
 from pungi.paths import Paths
+from pungi.wrappers.kojiwrapper import KojiDownloadProxy
 from pungi.wrappers.scm import get_file_from_scm
 from pungi.util import (
     makedirs,

@@ -57,14 +59,101 @@ except ImportError:
 SUPPORTED_MILESTONES = ["RC", "Update", "SecurityFix"]
 
 
+def is_status_fatal(status_code):
+    """Check if a status code returned from CTS reports an error that is
+    unlikely to be fixed by retrying. Generally client errors (4XX) are fatal,
+    with the exception of 401 Unauthorized, which could be caused by a
+    transient network issue between the compose host and the KDC.
+    """
+    if status_code == 401:
+        return False
+    return status_code >= 400 and status_code < 500
+
+
 @retry(wait_on=RequestException)
-def retry_request(method, url, data=None, auth=None):
+def retry_request(method, url, data=None, json_data=None, auth=None):
+    """
+    :param str method: Request method.
+    :param str url: Target URL.
+    :param dict data: form-urlencoded data to send in the body of the request.
+    :param dict json_data: JSON data to send in the body of the request.
+    """
     request_method = getattr(requests, method)
-    rv = request_method(url, json=data, auth=auth)
+    rv = request_method(url, data=data, json=json_data, auth=auth)
+    if is_status_fatal(rv.status_code):
+        try:
+            error = rv.json()
+        except ValueError:
+            error = rv.text
+        raise RuntimeError("%s responded with %d: %s" % (url, rv.status_code, error))
     rv.raise_for_status()
    return rv
+
+
+class BearerAuth(requests.auth.AuthBase):
+    def __init__(self, token):
+        self.token = token
+
+    def __call__(self, r):
+        r.headers["authorization"] = "Bearer " + self.token
+        return r
+
+
+@contextlib.contextmanager
+def cts_auth(pungi_conf):
+    """
+    :param dict pungi_conf: dict obj of pungi.json config.
+    """
+    auth = None
+    token = None
+    cts_keytab = pungi_conf.get("cts_keytab")
+    cts_oidc_token_url = os.environ.get("CTS_OIDC_TOKEN_URL", "") or pungi_conf.get(
+        "cts_oidc_token_url"
+    )
+
+    try:
+        if cts_keytab:
+            # requests-kerberos cannot accept a custom keytab; we need to use
+            # an environment variable for this. But we need to change the
+            # environment only temporarily, just for this single requests.post.
+            # So first back up the current environment and revert to it after
+            # the requests call.
+            from requests_kerberos import HTTPKerberosAuth
+
+            auth = HTTPKerberosAuth()
+            environ_copy = dict(os.environ)
+            if "$HOSTNAME" in cts_keytab:
+                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
+            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
+            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
+        elif cts_oidc_token_url:
+            cts_oidc_client_id = os.environ.get(
+                "CTS_OIDC_CLIENT_ID", ""
+            ) or pungi_conf.get("cts_oidc_client_id", "")
+            token = retry_request(
+                "post",
+                cts_oidc_token_url,
+                data={
+                    "grant_type": "client_credentials",
+                    "client_id": cts_oidc_client_id,
+                    "client_secret": os.environ.get("CTS_OIDC_CLIENT_SECRET", ""),
+                },
+            ).json()["access_token"]
+            auth = BearerAuth(token)
+            del token
+
+        yield auth
+    except Exception as e:
+        # Avoid leaking the client secret in the traceback
+        e.show_locals = False
+        raise e
+    finally:
+        if cts_keytab:
+            shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
+            os.environ.clear()
+            os.environ.update(environ_copy)
 
 
 def get_compose_info(
     conf,
     compose_type="production",
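A minimal sketch for illustration (not from the patch): how a custom `requests` auth object such as the new `BearerAuth` class plugs into a call. The URL and token are hypothetical.

import requests

class BearerAuth(requests.auth.AuthBase):
    """Same shape as the class added above: stamp a Bearer token on each request."""

    def __init__(self, token):
        self.token = token

    def __call__(self, r):
        # requests invokes the auth object with the prepared request just
        # before sending, so the header is applied to every attempt.
        r.headers["authorization"] = "Bearer " + self.token
        return r

# Hypothetical usage; cts.example.com stands in for a real CTS instance.
rv = requests.get(
    "https://cts.example.com/api/1/composes/", auth=BearerAuth("s3cr3t-token")
)
rv.raise_for_status()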
@@ -94,38 +183,19 @@ def get_compose_info(
     ci.compose.type = compose_type
     ci.compose.date = compose_date or time.strftime("%Y%m%d", time.localtime())
     ci.compose.respin = compose_respin or 0
+    ci.compose.id = ci.create_compose_id()
 
-    cts_url = conf.get("cts_url", None)
+    cts_url = conf.get("cts_url")
     if cts_url:
-        # Requests-kerberos cannot accept custom keytab, we need to use
-        # environment variable for this. But we need to change environment
-        # only temporarily just for this single requests.post.
-        # So at first backup the current environment and revert to it
-        # after the requests.post call.
-        cts_keytab = conf.get("cts_keytab", None)
-        authentication = get_authentication(conf)
-        if cts_keytab:
-            environ_copy = dict(os.environ)
-            if "$HOSTNAME" in cts_keytab:
-                cts_keytab = cts_keytab.replace("$HOSTNAME", socket.gethostname())
-            os.environ["KRB5_CLIENT_KTNAME"] = cts_keytab
-            os.environ["KRB5CCNAME"] = "DIR:%s" % tempfile.mkdtemp()
-
-        try:
-            # Create compose in CTS and get the reserved compose ID.
-            ci.compose.id = ci.create_compose_id()
-            url = os.path.join(cts_url, "api/1/composes/")
-            data = {
-                "compose_info": json.loads(ci.dumps()),
-                "parent_compose_ids": parent_compose_ids,
-                "respin_of": respin_of,
-            }
-            rv = retry_request("post", url, data=data, auth=authentication)
-        finally:
-            if cts_keytab:
-                shutil.rmtree(os.environ["KRB5CCNAME"].split(":", 1)[1])
-                os.environ.clear()
-                os.environ.update(environ_copy)
+        # Create compose in CTS and get the reserved compose ID.
+        url = os.path.join(cts_url, "api/1/composes/")
+        data = {
+            "compose_info": json.loads(ci.dumps()),
+            "parent_compose_ids": parent_compose_ids,
+            "respin_of": respin_of,
+        }
+        with cts_auth(conf) as authentication:
+            rv = retry_request("post", url, json_data=data, auth=authentication)
 
         # Update local ComposeInfo with received ComposeInfo.
         cts_ci = ComposeInfo()

@@ -133,22 +203,9 @@ def get_compose_info(
         ci.compose.respin = cts_ci.compose.respin
         ci.compose.id = cts_ci.compose.id
 
-    else:
-        ci.compose.id = ci.create_compose_id()
-
     return ci
 
 
-def get_authentication(conf):
-    authentication = None
-    cts_keytab = conf.get("cts_keytab", None)
-    if cts_keytab:
-        from requests_kerberos import HTTPKerberosAuth
-
-        authentication = HTTPKerberosAuth()
-    return authentication
-
-
 def write_compose_info(compose_dir, ci):
     """
     Write ComposeInfo `ci` to `compose_dir` subdirectories.

@@ -162,17 +219,20 @@ def write_compose_info(compose_dir, ci):
 
 
 def update_compose_url(compose_id, compose_dir, conf):
-    authentication = get_authentication(conf)
     cts_url = conf.get("cts_url", None)
     if cts_url:
         url = os.path.join(cts_url, "api/1/composes", compose_id)
         tp = conf.get("translate_paths", None)
         compose_url = translate_path_raw(tp, compose_dir)
+        if compose_url == compose_dir:
+            # We do not have a URL, do not attempt the update.
+            return
         data = {
             "action": "set_url",
             "compose_url": compose_url,
         }
-        return retry_request("patch", url, data=data, auth=authentication)
+        with cts_auth(conf) as authentication:
+            return retry_request("patch", url, json_data=data, auth=authentication)
 
 
 def get_compose_dir(

@@ -183,11 +243,19 @@ def get_compose_dir(
     compose_respin=None,
     compose_label=None,
     already_exists_callbacks=None,
+    parent_compose_ids=None,
+    respin_of=None,
 ):
     already_exists_callbacks = already_exists_callbacks or []
 
-    ci = get_compose_info(
-        conf, compose_type, compose_date, compose_respin, compose_label
-    )
+    ci = get_compose_info(
+        conf,
+        compose_type,
+        compose_date,
+        compose_respin,
+        compose_label,
+        parent_compose_ids,
+        respin_of,
+    )
 
     cts_url = conf.get("cts_url", None)

@@ -342,6 +410,8 @@ class Compose(kobo.log.LoggingBase):
         else:
             self.cache_region = make_region().configure("dogpile.cache.null")
 
+        self.koji_downloader = KojiDownloadProxy.from_config(self.conf, self._logger)
+
     get_compose_info = staticmethod(get_compose_info)
     write_compose_info = staticmethod(write_compose_info)
     get_compose_dir = staticmethod(get_compose_dir)

@@ -637,7 +707,7 @@ class Compose(kobo.log.LoggingBase):
             separators=(",", ": "),
         )
 
-    def traceback(self, detail=None):
+    def traceback(self, detail=None, show_locals=True):
         """Store an extended traceback. This method should only be called when
         handling an exception.

@@ -649,7 +719,7 @@ class Compose(kobo.log.LoggingBase):
         tb_path = self.paths.log.log_file("global", basename)
         self.log_error("Extended traceback in: %s", tb_path)
         with open(tb_path, "wb") as f:
-            f.write(kobo.tback.Traceback().get_traceback())
+            f.write(kobo.tback.Traceback(show_locals=show_locals).get_traceback())
 
     def load_old_compose_config(self):
         """

@@ -5,11 +5,14 @@ from __future__ import print_function
 import os
 import six
 from collections import namedtuple
+from kobo.shortcuts import run
 from six.moves import shlex_quote
 
 from .wrappers import iso
 from .wrappers.jigdo import JigdoWrapper
 
+from .phases.buildinstall import BOOT_CONFIGS, BOOT_IMAGES
+
 
 CreateIsoOpts = namedtuple(
     "CreateIsoOpts",

@@ -118,23 +121,73 @@ def make_jigdo(f, opts):
     emit(f, cmd)
 
 
+def _get_perms(fs_path):
+    """Compute proper permissions for a file.
+
+    This mimics what the -rational-rock option of genisoimage does. All read
+    bits are set, so that files and directories are globally readable. If any
+    execute bit is set for a file, set them all. No writes are allowed and
+    special bits are erased too.
+    """
+    statinfo = os.stat(fs_path)
+    perms = 0o444
+    if statinfo.st_mode & 0o111:
+        perms |= 0o111
+    return perms
+
+
 def write_xorriso_commands(opts):
+    # Create manifest for the boot.iso listing all contents
+    boot_iso_manifest = "%s.manifest" % os.path.join(
+        opts.script_dir, os.path.basename(opts.boot_iso)
+    )
+    run(
+        iso.get_manifest_cmd(
+            opts.boot_iso, opts.use_xorrisofs, output_file=boot_iso_manifest
+        )
+    )
+    # Find which files may have been updated by pungi. This only includes a few
+    # files from tweaking buildinstall and .discinfo metadata. There's no good
+    # way to detect whether the boot config files actually changed, so we may
+    # be updating files in the ISO with the same data.
+    UPDATEABLE_FILES = set(BOOT_IMAGES + BOOT_CONFIGS + [".discinfo"])
+    updated_files = set()
+    excluded_files = set()
+    with open(boot_iso_manifest) as f:
+        for line in f:
+            path = line.lstrip("/").rstrip("\n")
+            if path in UPDATEABLE_FILES:
+                updated_files.add(path)
+            else:
+                excluded_files.add(path)
+
     script = os.path.join(opts.script_dir, "xorriso-%s.txt" % id(opts))
     with open(script, "w") as f:
         emit(f, "-indev %s" % opts.boot_iso)
         emit(f, "-outdev %s" % os.path.join(opts.output_dir, opts.iso_name))
         emit(f, "-boot_image any replay")
         emit(f, "-volid %s" % opts.volid)
+        # isoinfo -J uses the Joliet tree, and it's used by virt-install
+        emit(f, "-joliet on")
+        # Support long filenames in the Joliet trees. Repodata is particularly
+        # likely to run into this limit.
+        emit(f, "-compliance joliet_long_names")
 
         with open(opts.graft_points) as gp:
             for line in gp:
                 iso_path, fs_path = line.strip().split("=", 1)
-                emit(f, "-map %s %s" % (fs_path, iso_path))
+                if iso_path in excluded_files:
+                    continue
+                cmd = "-update" if iso_path in updated_files else "-map"
+                emit(f, "%s %s %s" % (cmd, fs_path, iso_path))
+                emit(f, "-chmod 0%o %s" % (_get_perms(fs_path), iso_path))
 
         if opts.arch == "ppc64le":
             # This is needed for the image to be bootable.
             emit(f, "-as mkisofs -U --")
 
+        emit(f, "-chown_r 0 /")
+        emit(f, "-chgrp_r 0 /")
         emit(f, "-end")
     return script

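A minimal sketch for illustration (not from the patch): the permission rule in `_get_perms` collapses every on-disk mode to one of two values, which a standalone check makes visible. The sample modes are hypothetical.

def normalize_mode(mode):
    # Same rule as _get_perms above, applied to a raw st_mode value:
    # everyone may read, and if anyone may execute, everyone may execute.
    # Write and special bits are never copied over.
    perms = 0o444
    if mode & 0o111:
        perms |= 0o111
    return perms

assert normalize_mode(0o600) == 0o444   # private file -> world-readable
assert normalize_mode(0o750) == 0o555   # group-executable -> world-executable
assert normalize_mode(0o4755) == 0o555  # setuid bit is dropped entirely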
@@ -1118,7 +1118,6 @@ class Pungi(PungiBase):
         self.logger.info("Finished gathering package objects.")
 
-
     def gather(self):
 
         # get package objects according to the input list
         self.getPackageObjects()
         if self.is_sources:

@@ -15,17 +15,20 @@
 
 
 from enum import Enum
-from itertools import count
+from functools import cmp_to_key
+from itertools import count, groupby
 import logging
 import os
 import re
 
 from kobo.rpmlib import parse_nvra
+import rpm
 
 import pungi.common
 import pungi.dnf_wrapper
 import pungi.multilib_dnf
 import pungi.util
+from pungi import arch_utils
 from pungi.linker import Linker
 from pungi.profiler import Profiler
 from pungi.util import DEBUG_PATTERNS

@@ -245,12 +248,36 @@ class Gather(GatherBase):
         # from lookaside. This can be achieved by removing any package that is
         # also in lookaside from the list.
         lookaside_pkgs = set()
-        for pkg in package_list:
-            if pkg.repoid in self.opts.lookaside_repos:
-                lookaside_pkgs.add("{0.name}-{0.evr}".format(pkg))
 
-        if self.opts.greedy_method == "all":
-            return list(package_list)
+        if self.opts.lookaside_repos:
+            # We will call `latest()` to get the highest version packages only.
+            # However, that is per name and architecture. If a package switches
+            # from arched to noarch or the other way, it is possible that the
+            # package_list contains different versions in main repos and in
+            # lookaside repos.
+            # We need to manually filter the latest version.
+            def vercmp(x, y):
+                return rpm.labelCompare(x[1], y[1])
+
+            # Annotate the packages with their version.
+            versioned_packages = [
+                (pkg, (str(pkg.epoch) or "0", pkg.version, pkg.release))
+                for pkg in package_list
+            ]
+            # Sort the packages newest first.
+            sorted_packages = sorted(
+                versioned_packages, key=cmp_to_key(vercmp), reverse=True
+            )
+            # Group packages by version, take the first group and discard the
+            # version info from the tuple.
+            package_list = list(
+                x[0] for x in next(groupby(sorted_packages, key=lambda x: x[1]))[1]
+            )
+
+            # Now we can decide what is used from lookaside.
+            for pkg in package_list:
+                if pkg.repoid in self.opts.lookaside_repos:
+                    lookaside_pkgs.add("{0.name}-{0.evr}".format(pkg))
 
         all_pkgs = []
         for pkg in package_list:
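A minimal sketch for illustration (not from the patch): the sort-and-group trick used above to keep only the highest EVR, shown with made-up (epoch, version, release) tuples. Requires the rpm Python bindings.

from functools import cmp_to_key
from itertools import groupby

import rpm

# Made-up EVR tuples: (epoch, version, release).
evrs = [("0", "1.0", "1"), ("0", "1.2", "1"), ("0", "1.2", "1"), ("0", "1.1", "3")]

# rpm.labelCompare returns -1/0/1, so cmp_to_key turns it into a sort key.
newest_first = sorted(evrs, key=cmp_to_key(rpm.labelCompare), reverse=True)

# After sorting, the first group returned by groupby holds every entry that
# shares the highest EVR.
highest, group = next(groupby(newest_first))
print(highest, list(group))  # ('0', '1.2', '1') [('0', '1.2', '1'), ('0', '1.2', '1')]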
@@ -263,16 +290,21 @@ class Gather(GatherBase):
 
         if not debuginfo:
             native_pkgs = set(
-                self.q_native_binary_packages.filter(pkg=all_pkgs).apply()
+                self.q_native_binary_packages.filter(pkg=all_pkgs).latest().apply()
             )
             multilib_pkgs = set(
-                self.q_multilib_binary_packages.filter(pkg=all_pkgs).apply()
+                self.q_multilib_binary_packages.filter(pkg=all_pkgs).latest().apply()
             )
         else:
-            native_pkgs = set(self.q_native_debug_packages.filter(pkg=all_pkgs).apply())
-            multilib_pkgs = set(
-                self.q_multilib_debug_packages.filter(pkg=all_pkgs).apply()
-            )
+            native_pkgs = set(
+                self.q_native_debug_packages.filter(pkg=all_pkgs).latest().apply()
+            )
+            multilib_pkgs = set(
+                self.q_multilib_debug_packages.filter(pkg=all_pkgs).latest().apply()
+            )
+
+        if self.opts.greedy_method == "all":
+            return list(native_pkgs | multilib_pkgs)
 
         result = set()

@@ -392,9 +424,7 @@ class Gather(GatherBase):
         """Given a name of a queue (stored as an attribute in `self`), exclude
         all given packages and keep only the latest per package name and arch.
         """
-        setattr(
-            self, queue, getattr(self, queue).filter(pkg__neq=exclude).latest().apply()
-        )
+        setattr(self, queue, getattr(self, queue).filter(pkg__neq=exclude).apply())
 
     @Profiler("Gather._apply_excludes()")
     def _apply_excludes(self, excludes):
@@ -500,12 +530,21 @@ class Gather(GatherBase):
                     name__glob=pattern[:-2]
                 ).apply()
             else:
-                pkgs = self.q_binary_packages.filter(
-                    name__glob=pattern
-                ).apply()
+                kwargs = {"name__glob": pattern}
+                if "." in pattern:
+                    # The pattern could be name.arch. Check if the
+                    # arch is valid, and if yes, make a more
+                    # specific query.
+                    name, arch = pattern.split(".", 1)
+                    if arch in arch_utils.arches:
+                        kwargs["name__glob"] = name
+                        kwargs["arch__eq"] = arch
+                pkgs = self.q_binary_packages.filter(**kwargs).apply()
 
             if not pkgs:
-                self.logger.error("No package matches pattern %s" % pattern)
+                self.logger.error(
+                    "Could not find a match for %s in any configured repo", pattern
+                )
 
             # The pattern could have been a glob. In that case we want to
             # group the packages by name and get best match in those
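A minimal sketch for illustration (not from the patch): the name.arch pattern handling above, with a hypothetical arch set standing in for `arch_utils.arches`.

KNOWN_ARCHES = {"x86_64", "i686", "noarch"}  # hypothetical stand-in

def build_query_kwargs(pattern):
    # Default: treat the whole pattern as a (possibly globbed) package name.
    kwargs = {"name__glob": pattern}
    if "." in pattern:
        # Split on the first dot only: "bash.x86_64" -> ("bash", "x86_64"),
        # while "java-1.8.0" -> ("java-1", "8.0") and is left alone because
        # "8.0" is not a known arch.
        name, arch = pattern.split(".", 1)
        if arch in KNOWN_ARCHES:
            kwargs = {"name__glob": name, "arch__eq": arch}
    return kwargs

assert build_query_kwargs("bash.x86_64") == {"name__glob": "bash", "arch__eq": "x86_64"}
assert build_query_kwargs("java-1.8.0") == {"name__glob": "java-1.8.0"}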
@@ -616,7 +655,6 @@ class Gather(GatherBase):
             return added
 
-
         for pkg in self.result_debug_packages.copy():
 
             if pkg not in self.finished_add_debug_package_deps:
                 deps = self._get_package_deps(pkg, debuginfo=True)
                 for i, req in deps:

@@ -784,7 +822,6 @@ class Gather(GatherBase):
                 continue
 
             debug_pkgs = []
-            pkg_in_lookaside = pkg.repoid in self.opts.lookaside_repos
             for i in candidates:
                 if pkg.arch != i.arch:
                     continue

@@ -792,7 +829,7 @@ class Gather(GatherBase):
                     # If it's not a debugsource package or does not match the
                    # name of the package, we don't want it in.
                     continue
-                if i.repoid in self.opts.lookaside_repos or pkg_in_lookaside:
+                if self.is_from_lookaside(i):
                     self._set_flag(i, PkgFlag.lookaside)
                 if i not in self.result_debug_packages:
                     added.add(i)

@@ -306,11 +306,6 @@ def write_tree_info(compose, arch, variant, timestamp=None, bi=None):
     if variant.type in ("addon",) or variant.is_empty:
         return
 
-    compose.log_debug(
-        "on arch '%s' looking at variant '%s' of type '%s'"
-        % (arch, variant, variant.type)
-    )
-
     if not timestamp:
         timestamp = int(time.time())
     else:

@@ -297,7 +297,7 @@ class BuildinstallPhase(PhaseBase):
                 "Unsupported buildinstall method: %s" % self.buildinstall_method
             )
 
-        for (variant, cmd) in commands:
+        for variant, cmd in commands:
             self.pool.add(BuildinstallThread(self.pool))
             self.pool.queue_put(
                 (self.compose, arch, variant, cmd, self.pkgset_phase)

@@ -364,9 +364,17 @@ BOOT_CONFIGS = [
     "EFI/BOOT/BOOTX64.conf",
     "EFI/BOOT/grub.cfg",
 ]
+BOOT_IMAGES = [
+    "images/efiboot.img",
+]
 
 
 def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
+    """
+    Put the escaped volume ID and possibly a kickstart file into the boot
+    configuration files.
+
+    :returns: list of paths to modified config files
+    """
     volid_escaped = volid.replace(" ", r"\x20").replace("\\", "\\\\")
     volid_escaped_2 = volid_escaped.replace("\\", "\\\\")
     found_configs = []

@@ -374,7 +382,6 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
         config_path = os.path.join(path, config)
         if not os.path.exists(config_path):
             continue
-        found_configs.append(config)
 
         with open(config_path, "r") as f:
             data = original_data = f.read()

@@ -394,8 +401,13 @@ def tweak_configs(path, volid, ks_file, configs=BOOT_CONFIGS, logger=None):
         with open(config_path, "w") as f:
             f.write(data)
 
-        if logger and data != original_data:
-            logger.info("Boot config %s changed" % config_path)
+        if data != original_data:
+            found_configs.append(config)
+            if logger:
+                # Generally lorax should create the file with the correct
+                # volume id already. If we don't have a kickstart, this
+                # function should be a no-op.
+                logger.info("Boot config %s changed" % config_path)
 
     return found_configs

@@ -434,31 +446,32 @@ def tweak_buildinstall(
     if kickstart_file and found_configs:
         shutil.copy2(kickstart_file, os.path.join(dst, "ks.cfg"))
 
-    images = [
-        os.path.join(tmp_dir, "images", "efiboot.img"),
-    ]
-    for image in images:
-        if not os.path.isfile(image):
-            continue
-
-        with iso.mount(
-            image,
-            logger=compose._logger,
-            use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
-        ) as mount_tmp_dir:
-            for config in BOOT_CONFIGS:
-                config_path = os.path.join(tmp_dir, config)
-                config_in_image = os.path.join(mount_tmp_dir, config)
-
-                if os.path.isfile(config_in_image):
-                    cmd = [
-                        "cp",
-                        "-v",
-                        "--remove-destination",
-                        config_path,
-                        config_in_image,
-                    ]
-                    run(cmd)
+    images = [os.path.join(tmp_dir, img) for img in BOOT_IMAGES]
+    if found_configs:
+        for image in images:
+            if not os.path.isfile(image):
+                continue
+
+            with iso.mount(
+                image,
+                logger=compose._logger,
+                use_guestmount=compose.conf.get("buildinstall_use_guestmount"),
+            ) as mount_tmp_dir:
+                for config in found_configs:
+                    # Put each modified config file into the image (overwriting
+                    # the original).
+                    config_path = os.path.join(tmp_dir, config)
+                    config_in_image = os.path.join(mount_tmp_dir, config)
+
+                    if os.path.isfile(config_in_image):
+                        cmd = [
+                            "cp",
+                            "-v",
+                            "--remove-destination",
+                            config_path,
+                            config_in_image,
+                        ]
+                        run(cmd)
 
     # HACK: make buildinstall files world readable
     run("chmod -R a+rX %s" % shlex_quote(tmp_dir))

@@ -533,7 +546,14 @@ def link_boot_iso(compose, arch, variant, can_fail):
         img.volume_id = iso.get_volume_id(new_boot_iso_path)
     except RuntimeError:
         pass
-    compose.im.add(variant.uid, arch, img)
+    # In this phase we should add to the compose only images that will be
+    # used as netinstall images. At this step lorax generates the
+    # environment for creating ISOs and creates them.
+    # In the `extra_isos` step the unneeded `boot Minimal` ISO is overwritten
+    # by a new ISO that already contains the necessary packages from the
+    # included variants.
+    if variant.uid in compose.conf['netinstall_variants']:
+        compose.im.add(variant.uid, arch, img)
     compose.log_info("[DONE ] %s" % msg)

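A worked example for illustration (not from the patch): the two-stage volume-ID escaping done in `tweak_configs`. The volume ID is hypothetical.

volid = "Fedora 39 x86_64"

# Spaces become a \x20 escape, then every backslash is doubled.
volid_escaped = volid.replace(" ", r"\x20").replace("\\", "\\\\")
# A second doubling for config contexts that strip one more level of escaping.
volid_escaped_2 = volid_escaped.replace("\\", "\\\\")

print(volid_escaped)    # Fedora\\x2039\\x20x86_64
print(volid_escaped_2)  # Fedora\\\\x2039\\\\x20x86_64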
@@ -369,7 +369,7 @@ class CreateisoPhase(PhaseLoggerMixin, PhaseBase):
         if self.compose.notifier:
             self.compose.notifier.send("createiso-targets", deliverables=deliverables)
 
-        for (cmd, variant, arch) in commands:
+        for cmd, variant, arch in commands:
             self.pool.add(CreateIsoThread(self.pool))
             self.pool.queue_put((self.compose, cmd, variant, arch))

@@ -76,7 +76,7 @@ class ExtraIsosPhase(PhaseLoggerMixin, ConfigGuardedPhase, PhaseBase):
         for arch in sorted(arches):
             commands.append((config, variant, arch))
 
-        for (config, variant, arch) in commands:
+        for config, variant, arch in commands:
             self.pool.add(ExtraIsosThread(self.pool, self.bi))
             self.pool.queue_put((self.compose, config, variant, arch))

@@ -420,6 +420,12 @@ def get_iso_contents(
             original_treeinfo,
             os.path.join(extra_files_dir, ".treeinfo"),
         )
+        tweak_repo_treeinfo(
+            compose,
+            include_variants,
+            original_treeinfo,
+            original_treeinfo,
+        )
 
     # Add extra files specific for the ISO
     files.update(

@@ -431,6 +437,45 @@ def get_iso_contents(
     return gp
 
 
+def tweak_repo_treeinfo(compose, include_variants, source_file, dest_file):
+    """
+    Add the variants listed in the `extra_isos` -> `include_variants` option
+    to the .treeinfo file of a variant.
+    """
+    ti = productmd.treeinfo.TreeInfo()
+    ti.load(source_file)
+    main_variant = next(iter(ti.variants))
+    for variant_uid in include_variants:
+        variant = compose.all_variants[variant_uid]
+        var = productmd.treeinfo.Variant(ti)
+        var.id = variant.id
+        var.uid = variant.uid
+        var.name = variant.name
+        var.type = variant.type
+        ti.variants.add(var)
+
+    for variant_id in ti.variants:
+        var = ti.variants[variant_id]
+        if variant_id == main_variant:
+            var.paths.packages = 'Packages'
+            var.paths.repository = '.'
+        else:
+            var.paths.packages = os.path.join(
+                '../../..',
+                var.uid,
+                var.arch,
+                'os/Packages',
+            )
+            var.paths.repository = os.path.join(
+                '../../..',
+                var.uid,
+                var.arch,
+                'os',
+            )
+    ti.dump(dest_file, main_variant=main_variant)
+
+
 def tweak_treeinfo(compose, include_variants, source_file, dest_file):
     ti = load_and_tweak_treeinfo(source_file)
     for variant_uid in include_variants:

@@ -446,7 +491,6 @@ def tweak_treeinfo(compose, include_variants, source_file, dest_file):
         var = ti.variants[variant_id]
         var.paths.packages = os.path.join(var.uid, "Packages")
         var.paths.repository = var.uid
-
     ti.dump(dest_file)

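A minimal sketch for illustration (not from the patch): how the '../../..'-relative paths written by `tweak_repo_treeinfo` resolve against the on-disk compose layout. The variant and arch values are hypothetical.

import os.path

# A .treeinfo living at <topdir>/AppStream/x86_64/os/.treeinfo pointing at a
# sibling variant BaseOS via the '../../..' prefix used above:
treeinfo_dir = "/compose/AppStream/x86_64/os"
repository = os.path.join("../../..", "BaseOS", "x86_64", "os")

resolved = os.path.normpath(os.path.join(treeinfo_dir, repository))
print(resolved)  # /compose/BaseOS/x86_64/os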
@@ -23,6 +23,7 @@ import threading
 from kobo.rpmlib import parse_nvra
 from kobo.shortcuts import run
 from productmd.rpms import Rpms
+from pungi.phases.pkgset.common import get_all_arches
 from six.moves import cPickle as pickle
 
 try:

@@ -90,7 +91,7 @@ class GatherPhase(PhaseBase):
 
     # check whether variants from configuration value
     # 'variant_as_lookaside' are correct
-    for (requiring, required) in variant_as_lookaside:
+    for requiring, required in variant_as_lookaside:
         if requiring in all_variants and required not in all_variants:
             errors.append(
                 "variant_as_lookaside: variant %r doesn't exist but is "

@@ -99,7 +100,7 @@ class GatherPhase(PhaseBase):
 
     # check whether variants from configuration value
     # 'variant_as_lookaside' have same architectures
-    for (requiring, required) in variant_as_lookaside:
+    for requiring, required in variant_as_lookaside:
         if (
             requiring in all_variants
             and required in all_variants

@@ -235,7 +236,7 @@ def reuse_old_gather_packages(compose, arch, variant, package_sets, methods):
     if not hasattr(compose, "_gather_reused_variant_arch"):
         setattr(compose, "_gather_reused_variant_arch", [])
     variant_as_lookaside = compose.conf.get("variant_as_lookaside", [])
-    for (requiring, required) in variant_as_lookaside:
+    for requiring, required in variant_as_lookaside:
         if (
             requiring == variant.uid
             and (required, arch) not in compose._gather_reused_variant_arch

@@ -468,9 +469,7 @@ def gather_packages(compose, arch, variant, package_sets, fulltree_excludes=None
         )
 
     else:
-
         for source_name in ("module", "comps", "json"):
-
             packages, groups, filter_packages = get_variant_packages(
                 compose, arch, variant, source_name, package_sets
             )

@@ -575,7 +574,6 @@ def trim_packages(compose, arch, variant, pkg_map, parent_pkgs=None, remove_pkgs
     move_to_parent_pkgs = _mk_pkg_map()
     removed_pkgs = _mk_pkg_map()
     for pkg_type, pkgs in pkg_map.items():
-
         new_pkgs = []
         for pkg in pkgs:
             pkg_path = pkg["path"]

@@ -647,8 +645,14 @@ def _make_lookaside_repo(compose, variant, arch, pkg_map, package_sets=None):
             compose.paths.work.topdir(arch="global"), "download"
         )
         + "/",
-        "koji": lambda: pungi.wrappers.kojiwrapper.KojiWrapper(
-            compose
-        ).koji_module.config.topdir.rstrip("/")
+        "koji": lambda: compose.conf.get(
+            "koji_cache",
+            pungi.wrappers.kojiwrapper.KojiWrapper(compose).koji_module.config.topdir,
+        ).rstrip("/")
+        + "/",
+        "kojimock": lambda: pungi.wrappers.kojiwrapper.KojiMockWrapper(
+            compose,
+            get_all_arches(compose),
+        ).koji_module.config.topdir.rstrip("/")
         + "/",
     }

@@ -47,9 +47,15 @@ class FakePackage(object):
 
     @property
     def files(self):
-        return [
-            os.path.join(dirname, basename) for (_, dirname, basename) in self.pkg.files
-        ]
+        paths = []
+        # createrepo_c.Package.files is a tuple, but its length differs across
+        # versions. The constants define the index at which the related value
+        # is located.
+        for entry in self.pkg.files:
+            paths.append(
+                os.path.join(entry[cr.FILE_ENTRY_PATH], entry[cr.FILE_ENTRY_NAME])
+            )
+        return paths
 
     @property
     def provides(self):
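A minimal sketch for illustration (not from the patch): reading file entries by the named index constants rather than fixed-size tuple unpacking. The package object is hypothetical; `cr` is assumed to be the imported createrepo_c module, as in the code above.

import os

import createrepo_c as cr

def file_paths(pkg):
    # Index by the named constants instead of unpacking a fixed tuple shape,
    # so the code keeps working if createrepo_c adds fields to the entry.
    return [
        os.path.join(entry[cr.FILE_ENTRY_PATH], entry[cr.FILE_ENTRY_NAME])
        for entry in pkg.files
    ]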
@@ -25,6 +25,7 @@ from productmd.rpms import Rpms
 # results will be pulled into the compose.
 EXTENSIONS = {
     "docker": ["tar.gz", "tar.xz"],
+    "iso": ["iso"],
     "liveimg-squashfs": ["liveimg.squashfs"],
     "qcow": ["qcow"],
     "qcow2": ["qcow2"],

@@ -39,6 +40,7 @@ EXTENSIONS = {
     "vdi": ["vdi"],
     "vmdk": ["vmdk"],
     "vpc": ["vhd"],
+    "vhd-compressed": ["vhd.gz", "vhd.xz"],
     "vsphere-ova": ["vsphere.ova"],
 }

@@ -344,7 +346,9 @@ class CreateImageBuildThread(WorkerThread):
             # let's not change filename of koji outputs
             image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))
 
-            src_file = os.path.realpath(image_info["path"])
+            src_file = compose.koji_downloader.get_file(
+                os.path.realpath(image_info["path"])
+            )
             linker.link(src_file, image_dest, link_type=cmd["link_type"])
 
             # Update image manifest
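A minimal sketch for illustration (not from the patch): how a format-to-suffix table like EXTENSIONS is typically consulted. The filenames are hypothetical.

EXTENSIONS = {
    "docker": ["tar.gz", "tar.xz"],
    "iso": ["iso"],
    "vhd-compressed": ["vhd.gz", "vhd.xz"],
}

def matches(format_name, filename):
    # A file belongs to a format when it ends in one of the format's suffixes.
    return any(filename.endswith("." + s) for s in EXTENSIONS[format_name])

assert matches("vhd-compressed", "disk.vhd.xz")
assert not matches("iso", "disk.vhd.xz")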
@@ -117,7 +117,7 @@ class LiveImagesPhase(
 
             commands.append((cmd, variant, arch))
 
-        for (cmd, variant, arch) in commands:
+        for cmd, variant, arch in commands:
             self.pool.add(CreateLiveImageThread(self.pool))
             self.pool.queue_put((self.compose, cmd, variant, arch))

@@ -232,7 +232,7 @@ class CreateLiveImageThread(WorkerThread):
                 "Got %d images from task %d, expected 1."
                 % (len(image_path), output["task_id"])
             )
-        image_path = image_path[0]
+        image_path = compose.koji_downloader.get_file(image_path[0])
         filename = cmd.get("filename") or os.path.basename(image_path)
         destination = os.path.join(cmd["dest_dir"], filename)
         shutil.copy2(image_path, destination)

@@ -182,7 +182,9 @@ class LiveMediaThread(WorkerThread):
             # let's not change filename of koji outputs
             image_dest = os.path.join(image_dir, os.path.basename(image_info["path"]))
 
-            src_file = os.path.realpath(image_info["path"])
+            src_file = compose.koji_downloader.get_file(
+                os.path.realpath(image_info["path"])
+            )
             linker.link(src_file, image_dest, link_type=link_type)
 
             # Update image manifest

@@ -27,6 +27,35 @@ class OSBuildPhase(
         arches = set(image_conf["arches"]) & arches
         return sorted(arches)
 
+    @staticmethod
+    def _get_repo_urls(compose, repos, arch="$basearch"):
+        """
+        Get a list of repos with resolved repo URLs. Preserve repos defined
+        as dicts.
+        """
+        resolved_repos = []
+
+        for repo in repos:
+            if isinstance(repo, dict):
+                try:
+                    url = repo["baseurl"]
+                except KeyError:
+                    raise RuntimeError(
+                        "`baseurl` is required in repo dict %s" % str(repo)
+                    )
+                url = util.get_repo_url(compose, url, arch=arch)
+                if url is None:
+                    raise RuntimeError("Failed to resolve repo URL for %s" % str(repo))
+                repo["baseurl"] = url
+                resolved_repos.append(repo)
+            else:
+                repo = util.get_repo_url(compose, repo, arch=arch)
+                if repo is None:
+                    raise RuntimeError("Failed to resolve repo URL for %s" % repo)
+                resolved_repos.append(repo)
+
+        return resolved_repos
+
     def _get_repo(self, image_conf, variant):
         """
         Get a list of repos. First included are those explicitly listed in

@@ -38,7 +67,7 @@ class OSBuildPhase(
         if not variant.is_empty and variant.uid not in repos:
             repos.append(variant.uid)
 
-        return util.get_repo_urls(self.compose, repos, arch="$arch")
+        return OSBuildPhase._get_repo_urls(self.compose, repos, arch="$arch")
 
     def run(self):
         for variant in self.compose.get_variants():

@@ -183,16 +212,27 @@ class RunOSBuildThread(WorkerThread):
         # image_dir is absolute path to which the image should be copied.
         # We also need the same path as relative to compose directory for
         # including in the metadata.
-        image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
-        rel_image_dir = compose.paths.compose.image_dir(variant, relative=True) % {
-            "arch": arch
-        }
+        if archive["type_name"] == "iso":
+            # If the produced image is actually an ISO, it should go to the
+            # iso/ subdirectory.
+            image_dir = compose.paths.compose.iso_dir(arch, variant)
+            rel_image_dir = compose.paths.compose.iso_dir(
+                arch, variant, relative=True
+            )
+        else:
+            image_dir = compose.paths.compose.image_dir(variant) % {"arch": arch}
+            rel_image_dir = compose.paths.compose.image_dir(
+                variant, relative=True
+            ) % {"arch": arch}
         util.makedirs(image_dir)
 
         image_dest = os.path.join(image_dir, archive["filename"])
 
-        src_file = os.path.join(
-            koji.koji_module.pathinfo.imagebuild(build_info), archive["filename"]
-        )
+        src_file = compose.koji_downloader.get_file(
+            os.path.join(
+                koji.koji_module.pathinfo.imagebuild(build_info),
+                archive["filename"],
+            ),
+        )
 
         linker.link(src_file, image_dest, link_type=compose.conf["link_type"])

@@ -209,7 +249,7 @@ class RunOSBuildThread(WorkerThread):
 
         # Update image manifest
         img = Image(compose.im)
-        img.type = archive["type_name"]
+        img.type = archive["type_name"] if archive["type_name"] != "iso" else "dvd"
         img.format = suffix
         img.path = os.path.join(rel_image_dir, archive["filename"])
         img.mtime = util.get_mtime(image_dest)

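A minimal sketch for illustration (not from the patch): the mixed input `_get_repo_urls` accepts. The repo values are hypothetical, and URL resolution is shown with a stand-in for `util.get_repo_url`.

def resolve(value, arch):
    # Stand-in for util.get_repo_url: expand the arch placeholder.
    return value.replace("$arch", arch)

def get_repo_urls(repos, arch):
    resolved = []
    for repo in repos:
        if isinstance(repo, dict):
            # Dict repos keep their extra keys; only baseurl is rewritten.
            repo = dict(repo, baseurl=resolve(repo["baseurl"], arch))
        else:
            repo = resolve(repo, arch)
        resolved.append(repo)
    return resolved

repos = [
    "https://example.com/repo/$arch/os",
    {"baseurl": "https://example.com/extra/$arch", "proxy": "http://proxy.example.com"},
]
print(get_repo_urls(repos, "x86_64"))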
@@ -168,7 +168,9 @@ class OSTreeThread(WorkerThread):
                 ("unified-core", config.get("unified_core", False)),
             ]
         )
-        packages = ["pungi", "ostree", "rpm-ostree"]
+        default_packages = ["pungi", "ostree", "rpm-ostree"]
+        additional_packages = config.get("runroot_packages", [])
+        packages = default_packages + additional_packages
         log_file = os.path.join(self.logdir, "runroot.log")
         mounts = [compose.topdir, config["ostree_repo"]]
         runroot = Runroot(compose, phase="ostree")

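A minimal sketch for illustration (not from the patch): with this change, an ostree phase config block can extend the runroot package list. The `runroot_packages` key comes from the diff; the other values are hypothetical.

config = {
    "ostree_repo": "/mnt/koji/compose/ostree/repo",
    # Extra packages installed into the runroot on top of the defaults.
    "runroot_packages": ["selinux-policy-targeted", "rpm-ostree-toolbox"],
}

default_packages = ["pungi", "ostree", "rpm-ostree"]
packages = default_packages + config.get("runroot_packages", [])
print(packages)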
@@ -38,12 +38,17 @@ from pungi.phases.createrepo import add_modular_metadata
 
 
 def populate_arch_pkgsets(compose, path_prefix, global_pkgset):
     result = {}
-    exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
     for arch in compose.get_arches():
         compose.log_info("Populating package set for arch: %s", arch)
         is_multilib = is_arch_multilib(compose.conf, arch)
         arches = get_valid_arches(arch, is_multilib, add_src=True)
-        pkgset = global_pkgset.subset(arch, arches, exclusive_noarch=exclusive_noarch)
+        pkgset = global_pkgset.subset(
+            arch,
+            arches,
+            exclusive_noarch=compose.conf["pkgset_exclusive_arch_considers_noarch"],
+            inherit_to_noarch=compose.conf["pkgset_inherit_exclusive_arch_to_noarch"],
+        )
         pkgset.save_file_list(
             compose.paths.work.package_list(arch=arch, pkgset=global_pkgset),
             remove_path_prefix=path_prefix,

@@ -23,11 +23,15 @@ import itertools
 import json
 import os
 import time
+import pgpy
+import rpm
 from six.moves import cPickle as pickle
+from functools import partial
 
 import kobo.log
 import kobo.pkgset
 import kobo.rpmlib
+from kobo.shortcuts import compute_file_checksums
 
 from kobo.threads import WorkerThread, ThreadPool

@@ -150,9 +154,15 @@ class PackageSetBase(kobo.log.LoggingBase):
         """
 
         def nvr_formatter(package_info):
-            # joins NVR parts of the package with '-' character.
-            return "-".join(
-                (package_info["name"], package_info["version"], package_info["release"])
-            )
+            epoch_suffix = ''
+            if package_info['epoch'] is not None:
+                epoch_suffix = ':' + package_info['epoch']
+            return (
+                f"{package_info['name']}"
+                f"{epoch_suffix}-"
+                f"{package_info['version']}-"
+                f"{package_info['release']}."
+                f"{package_info['arch']}"
+            )
 
         def get_error(sigkeys, infos):
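A worked example for illustration (not from the patch): what the new formatter produces for a hypothetical package dict, versus the old name-version-release join.

def nvr_formatter(package_info):
    # Same logic as the new formatter above.
    epoch_suffix = ''
    if package_info['epoch'] is not None:
        epoch_suffix = ':' + package_info['epoch']
    return (
        f"{package_info['name']}{epoch_suffix}-"
        f"{package_info['version']}-{package_info['release']}.{package_info['arch']}"
    )

pkg = {"name": "bash", "epoch": "0", "version": "5.1.8", "release": "1.fc35", "arch": "x86_64"}
print(nvr_formatter(pkg))  # bash:0-5.1.8-1.fc35.x86_64  (old style: bash-5.1.8-1.fc35)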
@@ -203,16 +213,31 @@ class PackageSetBase(kobo.log.LoggingBase):
 
         return self.rpms_by_arch
 
-    def subset(self, primary_arch, arch_list, exclusive_noarch=True):
+    def subset(
+        self, primary_arch, arch_list, exclusive_noarch=True, inherit_to_noarch=True
+    ):
         """Create a subset of this package set that only includes
         packages compatible with"""
         pkgset = PackageSetBase(
             self.name, self.sigkey_ordering, logger=self._logger, arches=arch_list
         )
-        pkgset.merge(self, primary_arch, arch_list, exclusive_noarch=exclusive_noarch)
+        pkgset.merge(
+            self,
+            primary_arch,
+            arch_list,
+            exclusive_noarch=exclusive_noarch,
+            inherit_to_noarch=inherit_to_noarch,
+        )
         return pkgset
 
-    def merge(self, other, primary_arch, arch_list, exclusive_noarch=True):
+    def merge(
+        self,
+        other,
+        primary_arch,
+        arch_list,
+        exclusive_noarch=True,
+        inherit_to_noarch=True,
+    ):
         """
         Merge ``other`` package set into this instance.
         """

@@ -251,7 +276,7 @@ class PackageSetBase(kobo.log.LoggingBase):
             if i.file_path in self.file_cache:
                 # TODO: test if it really works
                 continue
-            if exclusivearch_list and arch == "noarch":
+            if inherit_to_noarch and exclusivearch_list and arch == "noarch":
                 if is_excluded(i, exclusivearch_list, logger=self._logger):
                     continue

@@ -318,6 +343,11 @@ class FilelistPackageSet(PackageSetBase):
     return result
 
 
+# This is a marker to indicate a package set with only extra builds/tasks and
+# no Koji tag.
+MISSING_KOJI_TAG = object()
+
+
 class KojiPackageSet(PackageSetBase):
     def __init__(
         self,

@@ -334,6 +364,7 @@ class KojiPackageSet(PackageSetBase):
         extra_tasks=None,
         signed_packages_retries=0,
         signed_packages_wait=30,
+        downloader=None,
     ):
         """
         Creates new KojiPackageSet.

@@ -371,7 +402,7 @@ class KojiPackageSet(PackageSetBase):
         :param int signed_packages_wait: How long to wait between search attempts.
         """
         super(KojiPackageSet, self).__init__(
-            name,
+            name if name != MISSING_KOJI_TAG else "no-tag",
             sigkey_ordering=sigkey_ordering,
             arches=arches,
             logger=logger,

@@ -388,6 +419,8 @@ class KojiPackageSet(PackageSetBase):
         self.signed_packages_retries = signed_packages_retries
         self.signed_packages_wait = signed_packages_wait
 
+        self.downloader = downloader
+
     def __getstate__(self):
         result = self.__dict__.copy()
         del result["koji_wrapper"]

@@ -478,7 +511,8 @@ class KojiPackageSet(PackageSetBase):
 
         response = None
         if self.cache_region:
-            cache_key = "KojiPackageSet.get_latest_rpms_%s_%s_%s" % (
+            cache_key = "%s.get_latest_rpms_%s_%s_%s" % (
+                str(self.__class__.__name__),
                 str(tag),
                 str(event),
                 str(inherit),

@@ -500,17 +534,36 @@ class KojiPackageSet(PackageSetBase):
 
         return response
 
     def get_package_path(self, queue_item):
         rpm_info, build_info = queue_item
 
         # Check if this RPM is coming from scratch task. In this case, we already
         # know the path.
         if "path_from_task" in rpm_info:
-            return rpm_info["path_from_task"]
+            return self.downloader.get_file(rpm_info["path_from_task"])
 
         pathinfo = self.koji_wrapper.koji_module.pathinfo
         paths = []
 
+        if "getRPMChecksums" in self.koji_proxy.system.listMethods():
+
+            def checksum_validator(keyname, pkg_path):
+                checksums = self.koji_proxy.getRPMChecksums(
+                    rpm_info["id"], checksum_types=("sha256",)
+                )
+                if "sha256" in checksums.get(keyname, {}):
+                    computed = compute_file_checksums(pkg_path, ("sha256",))
+                    if computed["sha256"] != checksums[keyname]["sha256"]:
+                        raise RuntimeError("Checksum mismatch for %s" % pkg_path)
+
+        else:
+
+            def checksum_validator(keyname, pkg_path):
+                # Koji doesn't support checksums yet
+                pass
+
         attempts_left = self.signed_packages_retries + 1
         while attempts_left > 0:
             for sigkey in self.sigkey_ordering:
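A minimal sketch for illustration (not from the patch): the validate-on-download pattern used with `partial(checksum_validator, sigkey)`. The hashing helper stands in for kobo's `compute_file_checksums`.

import hashlib
from functools import partial

def sha256_of(path):
    # Stand-in for kobo.shortcuts.compute_file_checksums(path, ("sha256",)).
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

def checksum_validator(expected, pkg_path):
    if sha256_of(pkg_path) != expected:
        raise RuntimeError("Checksum mismatch for %s" % pkg_path)

# A downloader can accept the two-argument validator with its first argument
# pre-bound, mirroring partial(checksum_validator, sigkey) above.
validate = partial(
    checksum_validator,
    "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
)
validate("/dev/null")  # the empty file hashes to the bound value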
@@ -523,8 +576,11 @@ class KojiPackageSet(PackageSetBase):
                 )
                 if rpm_path not in paths:
                     paths.append(rpm_path)
-                if os.path.isfile(rpm_path):
-                    return rpm_path
+                path = self.downloader.get_file(
+                    rpm_path, partial(checksum_validator, sigkey)
+                )
+                if path:
+                    return path
 
             # No signed copy was found, wait a little and try again.
             attempts_left -= 1

@@ -537,16 +593,18 @@ class KojiPackageSet(PackageSetBase):
             # use an unsigned copy (if allowed)
             rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
             paths.append(rpm_path)
-            if os.path.isfile(rpm_path):
-                return rpm_path
+            path = self.downloader.get_file(rpm_path, partial(checksum_validator, ""))
+            if path:
+                return path
 
         if self._allow_invalid_sigkeys and rpm_info["name"] not in self.packages:
             # use an unsigned copy (if allowed)
             rpm_path = os.path.join(pathinfo.build(build_info), pathinfo.rpm(rpm_info))
             paths.append(rpm_path)
-            if os.path.isfile(rpm_path):
+            path = self.downloader.get_file(rpm_path)
+            if path:
                 self._invalid_sigkey_rpms.append(rpm_info)
-                return rpm_path
+                return path
 
         self._invalid_sigkey_rpms.append(rpm_info)
         self.log_error(

@@ -576,7 +634,9 @@ class KojiPackageSet(PackageSetBase):
             inherit,
         )
         self.log_info("[BEGIN] %s" % msg)
-        rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
+        rpms, builds = [], []
+        if tag != MISSING_KOJI_TAG:
+            rpms, builds = self.get_latest_rpms(tag, event, inherit=inherit)
         extra_rpms, extra_builds = self.get_extra_rpms()
         rpms += extra_rpms
         builds += extra_builds

@@ -681,6 +741,15 @@ class KojiPackageSet(PackageSetBase):
         :param include_packages: an iterable of tuples (package name, arch) that should
             be included.
         """
+        if len(self.sigkey_ordering) > 1 and (
+            None in self.sigkey_ordering or "" in self.sigkey_ordering
+        ):
+            self.log_warning(
+                "Not writing the reuse file, as unsigned packages are allowed "
+                "in the compose."
+            )
+            return
+
         reuse_file = compose.paths.work.pkgset_reuse_file(self.name)
         self.log_info("Writing pkgset reuse file: %s" % reuse_file)
         try:

@@ -697,6 +766,12 @@ class KojiPackageSet(PackageSetBase):
                     "srpms_by_name": self.srpms_by_name,
                     "extra_builds": self.extra_builds,
                     "include_packages": include_packages,
+                    "inherit_to_noarch": compose.conf[
+                        "pkgset_inherit_exclusive_arch_to_noarch"
+                    ],
+                    "exclusive_noarch": compose.conf[
+                        "pkgset_exclusive_arch_considers_noarch"
+                    ],
                 },
                 f,
                 protocol=pickle.HIGHEST_PROTOCOL,

@@ -791,6 +866,8 @@ class KojiPackageSet(PackageSetBase):
             self.log_debug("Failed to load reuse file: %s" % str(e))
             return False
 
+        inherit_to_noarch = compose.conf["pkgset_inherit_exclusive_arch_to_noarch"]
+        exclusive_noarch = compose.conf["pkgset_exclusive_arch_considers_noarch"]
         if (
             reuse_data["allow_invalid_sigkeys"] == self._allow_invalid_sigkeys
             and reuse_data["packages"] == self.packages

@@ -798,6 +875,10 @@ class KojiPackageSet(PackageSetBase):
             and reuse_data["extra_builds"] == self.extra_builds
             and reuse_data["sigkeys"] == self.sigkey_ordering
             and reuse_data["include_packages"] == include_packages
+            # If the value is not present in reuse data, the compose was
+            # generated with an older version of Pungi. Best to not reuse.
+            and reuse_data.get("inherit_to_noarch") == inherit_to_noarch
+            and reuse_data.get("exclusive_noarch") == exclusive_noarch
         ):
             self.log_info("Copying repo data for reuse: %s" % old_repo_dir)
|
self.log_info("Copying repo data for reuse: %s" % old_repo_dir)
|
||||||
copy_all(old_repo_dir, repo_dir)
|
copy_all(old_repo_dir, repo_dir)
|
||||||
|
@ -812,6 +893,67 @@ class KojiPackageSet(PackageSetBase):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class KojiMockPackageSet(KojiPackageSet):
|
||||||
|
|
||||||
|
def _is_rpm_signed(self, rpm_path) -> bool:
|
||||||
|
ts = rpm.TransactionSet()
|
||||||
|
ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
|
||||||
|
sigkeys = [
|
||||||
|
sigkey.lower() for sigkey in self.sigkey_ordering
|
||||||
|
if sigkey is not None
|
||||||
|
]
|
||||||
|
if not sigkeys:
|
||||||
|
return True
|
||||||
|
with open(rpm_path, 'rb') as fd:
|
||||||
|
header = ts.hdrFromFdno(fd)
|
||||||
|
signature = header[rpm.RPMTAG_SIGGPG] or header[rpm.RPMTAG_SIGPGP]
|
||||||
|
if signature is None:
|
||||||
|
return False
|
||||||
|
pgp_msg = pgpy.PGPMessage.from_blob(signature)
|
||||||
|
return any(
|
||||||
|
signature.signer.lower() in sigkeys
|
||||||
|
for signature in pgp_msg.signatures
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_package_path(self, queue_item):
|
||||||
|
rpm_info, build_info = queue_item
|
||||||
|
|
||||||
|
# Check if this RPM is coming from scratch task.
|
||||||
|
# In this case, we already know the path.
|
||||||
|
if "path_from_task" in rpm_info:
|
||||||
|
return rpm_info["path_from_task"]
|
||||||
|
|
||||||
|
# we replaced this part because pungi uses way
|
||||||
|
# of guessing path of package on koji based on sigkey
|
||||||
|
# we don't need that because all our packages will
|
||||||
|
# be ready for release
|
||||||
|
# signature verification is still done during deps resolution
|
||||||
|
pathinfo = self.koji_wrapper.koji_module.pathinfo
|
||||||
|
|
||||||
|
rpm_path = os.path.join(pathinfo.topdir, pathinfo.rpm(rpm_info))
|
||||||
|
if os.path.isfile(rpm_path):
|
||||||
|
if not self._is_rpm_signed(rpm_path):
|
||||||
|
self._invalid_sigkey_rpms.append(rpm_info)
|
||||||
|
self.log_error(
|
||||||
|
'RPM "%s" not found for sigs: "%s". Path checked: "%s"',
|
||||||
|
rpm_info, self.sigkey_ordering, rpm_path
|
||||||
|
)
|
||||||
|
return
|
||||||
|
return rpm_path
|
||||||
|
else:
|
||||||
|
self.log_warning("RPM %s not found" % rpm_path)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def populate(self, tag, event=None, inherit=True, include_packages=None):
|
||||||
|
result = super().populate(
|
||||||
|
tag=tag,
|
||||||
|
event=event,
|
||||||
|
inherit=inherit,
|
||||||
|
include_packages=include_packages,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
def _is_src(rpm_info):
|
def _is_src(rpm_info):
|
||||||
"""Check if rpm info object returned by Koji refers to source packages."""
|
"""Check if rpm info object returned by Koji refers to source packages."""
|
||||||
return rpm_info["arch"] in ("src", "nosrc")
|
return rpm_info["arch"] in ("src", "nosrc")
|
||||||
|
|
|
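For context on the `partial(checksum_validator, sigkey)` calls above: the downloader takes a one-argument callback and invokes it on the downloaded file, so `functools.partial` pins the sigkey while leaving the path to be supplied later. A minimal, self-contained sketch of that pattern (not Pungi's actual downloader; `expected_digests` is a hypothetical stand-in for Koji's checksum data):

    import hashlib
    from functools import partial

    def compute_sha256(path):
        # Stream the file so large RPMs are not loaded into memory at once.
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                digest.update(chunk)
        return digest.hexdigest()

    def get_file(path, validator=None):
        # Simplified stand-in for the downloader: validate, then hand back the path.
        if validator is not None:
            validator(path)  # raises RuntimeError on checksum mismatch
        return path

    expected_digests = {"sigkey-abc": "0123..."}  # hypothetical sigkey -> sha256 map

    def checksum_validator(keyname, pkg_path):
        if compute_sha256(pkg_path) != expected_digests.get(keyname):
            raise RuntimeError("Checksum mismatch for %s" % pkg_path)

    # partial() fixes keyname, producing the one-argument callback get_file expects:
    # get_file("/mnt/koji/.../pkg.rpm", partial(checksum_validator, "sigkey-abc"))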
@@ -15,8 +15,10 @@

 from .source_koji import PkgsetSourceKoji
 from .source_repos import PkgsetSourceRepos
+from .source_kojimock import PkgsetSourceKojiMock

 ALL_SOURCES = {
     "koji": PkgsetSourceKoji,
     "repos": PkgsetSourceRepos,
+    "kojimock": PkgsetSourceKojiMock,
 }
@@ -193,17 +193,13 @@ class PkgsetSourceKoji(pungi.phases.pkgset.source.PkgsetSourceBase):
     def __call__(self):
         compose = self.compose
         self.koji_wrapper = pungi.wrappers.kojiwrapper.KojiWrapper(compose)
-        # path prefix must contain trailing '/'
-        path_prefix = self.koji_wrapper.koji_module.config.topdir.rstrip("/") + "/"
-        package_sets = get_pkgset_from_koji(
-            self.compose, self.koji_wrapper, path_prefix
-        )
-        return (package_sets, path_prefix)
+        package_sets = get_pkgset_from_koji(self.compose, self.koji_wrapper)
+        return (package_sets, self.compose.koji_downloader.path_prefix)


-def get_pkgset_from_koji(compose, koji_wrapper, path_prefix):
+def get_pkgset_from_koji(compose, koji_wrapper):
     event_info = get_koji_event_info(compose, koji_wrapper)
-    return populate_global_pkgset(compose, koji_wrapper, path_prefix, event_info)
+    return populate_global_pkgset(compose, koji_wrapper, event_info)


 def _add_module_to_variant(

@@ -232,7 +228,7 @@ def _add_module_to_variant(
             continue
         typedir = koji_wrapper.koji_module.pathinfo.typedir(build, archive["btype"])
         filename = archive["filename"]
-        file_path = os.path.join(typedir, filename)
+        file_path = compose.koji_downloader.get_file(os.path.join(typedir, filename))
         try:
             # If there are two dots, the arch is in the middle. MBS uploads
             # files with actual architecture in the filename, but Pungi deals

@@ -270,9 +266,14 @@ def _add_module_to_variant(
                 "Module %s does not have metadata for arch %s and is not filtered "
                 "out via filter_modules option." % (nsvc, arch)
             )
-        mod_stream = read_single_module_stream_from_file(
-            mmds[filename], compose, arch, build
-        )
+        try:
+            mod_stream = read_single_module_stream_from_file(
+                mmds[filename], compose, arch, build
+            )
+        except Exception as exc:
+            # libmodulemd raises various GLib exceptions with not very helpful
+            # messages. Let's replace it with something more useful.
+            raise RuntimeError("Failed to read %s: %s" % (mmds[filename], str(exc)))
         if mod_stream:
             added = True
             variant.arch_mmds.setdefault(arch, {})[nsvc] = mod_stream

@@ -395,7 +396,13 @@ def _is_filtered_out(compose, variant, arch, module_name, module_stream):


 def _get_modules_from_koji(
-    compose, koji_wrapper, event, variant, variant_tags, tag_to_mmd, exclude_module_ns
+    compose,
+    koji_wrapper,
+    event,
+    variant,
+    variant_tags,
+    tag_to_mmd,
+    exclude_module_ns,
 ):
     """
     Loads modules for given `variant` from koji `session`, adds them to

@@ -670,7 +677,7 @@ def _get_modules_from_koji_tags(
     )


-def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
+def populate_global_pkgset(compose, koji_wrapper, event):
     all_arches = get_all_arches(compose)

     # List of compose tags from which we create this compose

@@ -764,7 +771,12 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):

         if extra_modules:
             _add_extra_modules_to_variant(
-                compose, koji_wrapper, variant, extra_modules, variant_tags, tag_to_mmd
+                compose,
+                koji_wrapper,
+                variant,
+                extra_modules,
+                variant_tags,
+                tag_to_mmd,
             )

         variant_scratch_modules = get_variant_data(

@@ -791,17 +803,23 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):

     pkgsets = []

+    extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
+    extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
+
+    if not pkgset_koji_tags and (extra_builds or extra_tasks):
+        # We have extra packages to pull in, but no tag to merge them with.
+        compose_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)
+        pkgset_koji_tags.append(pungi.phases.pkgset.pkgsets.MISSING_KOJI_TAG)
+
     # Get package set for each compose tag and merge it to global package
     # list. Also prepare per-variant pkgset, because we do not have list
     # of binary RPMs in module definition - there is just list of SRPMs.
     for compose_tag in compose_tags:
         compose.log_info("Loading package set for tag %s", compose_tag)
+        kwargs = {}
         if compose_tag in pkgset_koji_tags:
-            extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
-            extra_tasks = force_list(compose.conf.get("pkgset_koji_scratch_tasks", []))
-        else:
-            extra_builds = []
-            extra_tasks = []
+            kwargs["extra_builds"] = extra_builds
+            kwargs["extra_tasks"] = extra_tasks

         pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
             compose_tag,

@@ -813,10 +831,10 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
             allow_invalid_sigkeys=allow_invalid_sigkeys,
             populate_only_packages=populate_only_packages_to_gather,
             cache_region=compose.cache_region,
-            extra_builds=extra_builds,
-            extra_tasks=extra_tasks,
             signed_packages_retries=compose.conf["signed_packages_retries"],
             signed_packages_wait=compose.conf["signed_packages_wait"],
+            downloader=compose.koji_downloader,
+            **kwargs
         )

         # Check if we have cache for this tag from previous compose. If so, use

@@ -880,7 +898,6 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
         )
         for variant in compose.all_variants.values():
             if compose_tag in variant_tags[variant]:
-
                 # If it's a modular tag, store the package set for the module.
                 for nsvc, koji_tag in variant.module_uid_to_koji_tag.items():
                     if compose_tag == koji_tag:

@@ -903,7 +920,7 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
                 MaterializedPackageSet.create,
                 compose,
                 pkgset,
-                path_prefix,
+                compose.koji_downloader.path_prefix,
                 mmd=tag_to_mmd.get(pkgset.name),
             )
         )
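The thread running through these hunks is that the hard-coded topdir-based `path_prefix` is replaced by `compose.koji_downloader`, which both fetches files and exposes the prefix under which they land. A rough sketch of what such an object could look like (illustrative only; the actual downloader is presumably defined in the large diff suppressed below, and all names here are assumptions):

    import os
    import shutil

    class LocalKojiDownloader:
        """Illustrative stand-in: mirrors files from the Koji topdir into a cache."""

        def __init__(self, topdir, cache_dir):
            self.topdir = topdir
            # Callers join RPM paths onto this, so keep the trailing slash.
            self.path_prefix = cache_dir.rstrip("/") + "/"

        def get_file(self, path, validator=None):
            destination = os.path.join(
                self.path_prefix, os.path.relpath(path, self.topdir)
            )
            if not os.path.isfile(destination):
                if not os.path.isfile(path):
                    return None  # matches the "if path:" checks in the hunks above
                os.makedirs(os.path.dirname(destination), exist_ok=True)
                shutil.copy2(path, destination)
            if validator is not None:
                validator(destination)
            return destination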
File diff suppressed because it is too large
@@ -94,7 +94,7 @@ class Runroot(kobo.log.LoggingBase):
         log_file = os.path.join(log_dir, "program.log")
         try:
             with open(log_file) as f:
-                for line in f:
+                for line in f.readlines():
                     if "losetup: cannot find an unused loop device" in line:
                         return True
                     if re.match("losetup: .* failed to set up loop device", line):
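A quick check of the `re.match` pattern above (standalone; the log line is a representative losetup error message, not taken from a real run):

    import re

    line = "losetup: /dev/loop7: failed to set up loop device: No such device"
    # re.match anchors at the start of the string, so the line must begin with
    # "losetup:"; ".*" then spans the device name in the middle of the message.
    print(bool(re.match("losetup: .* failed to set up loop device", line)))        # True
    print(bool(re.match("losetup: .* failed to set up loop device", "x" + line)))  # False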
@@ -0,0 +1,63 @@
+import argparse
+import os
+import re
+import time
+
+from pungi.util import format_size
+
+
+LOCK_RE = re.compile(r".*\.lock(\|[A-Za-z0-9]+)*$")
+
+
+def should_be_cleaned_up(path, st, threshold):
+    if st.st_nlink == 1 and st.st_mtime < threshold:
+        # No other instances, older than limit
+        return True
+
+    if LOCK_RE.match(path) and st.st_mtime < threshold:
+        # Suspiciously old lock
+        return True
+
+    return False
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("CACHE_DIR")
+    parser.add_argument("-n", "--dry-run", action="store_true")
+    parser.add_argument("--verbose", action="store_true")
+    parser.add_argument(
+        "--max-age",
+        help="how old files should be considered for deletion",
+        default=7,
+        type=int,
+    )
+
+    args = parser.parse_args()
+
+    topdir = os.path.abspath(args.CACHE_DIR)
+    max_age = args.max_age * 24 * 3600
+
+    cleaned_up = 0
+
+    threshold = time.time() - max_age
+    for dirpath, dirnames, filenames in os.walk(topdir):
+        for f in filenames:
+            filepath = os.path.join(dirpath, f)
+            st = os.stat(filepath)
+            if should_be_cleaned_up(filepath, st, threshold):
+                if args.verbose:
+                    print("RM %s" % filepath)
+                cleaned_up += st.st_size
+                if not args.dry_run:
+                    os.remove(filepath)
+        if not dirnames and not filenames:
+            if args.verbose:
+                print("RMDIR %s" % dirpath)
+            if not args.dry_run:
+                os.rmdir(dirpath)
+
+    if args.dry_run:
+        print("Would reclaim %s bytes." % format_size(cleaned_up))
+    else:
+        print("Reclaimed %s bytes." % format_size(cleaned_up))
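The `st_nlink == 1` test above suggests the cache relies on hardlinks: a cached file that composes still use carries extra links, while a link count of one means the cache holds the only copy. A standalone illustration of that behavior (assumes a filesystem with hardlink support):

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as workdir:
        cached = os.path.join(workdir, "pkg.rpm")
        open(cached, "w").close()
        print(os.stat(cached).st_nlink)  # 1 -> only the cache references it

        # A compose "uses" the file by hardlinking it; the count rises to 2,
        # so should_be_cleaned_up() would keep it regardless of age.
        os.link(cached, os.path.join(workdir, "compose-copy.rpm"))
        print(os.stat(cached).st_nlink)  # 2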
@@ -171,32 +171,11 @@ def main():
     group.add_argument(
         "--offline", action="store_true", help="Do not resolve git references."
     )
-    parser.add_argument(
-        "--multi",
-        metavar="DIR",
-        help=(
-            "Treat source as config for pungi-orchestrate and store dump into "
-            "given directory."
-        ),
-    )

     args = parser.parse_args()

     defines = config_utils.extract_defines(args.define)

-    if args.multi:
-        if len(args.sources) > 1:
-            parser.error("Only one multi config can be specified.")
-
-        return dump_multi_config(
-            args.sources[0],
-            dest=args.multi,
-            defines=defines,
-            just_dump=args.just_dump,
-            event=args.freeze_event,
-            offline=args.offline,
-        )
-
     return process_file(
         args.sources,
         defines=defines,
@@ -0,0 +1,441 @@
+# coding=utf-8
+
+import argparse
+import os
+import subprocess
+import tempfile
+from shutil import rmtree
+from typing import (
+    AnyStr,
+    List,
+    Dict,
+    Optional,
+)
+
+import createrepo_c as cr
+import requests
+import yaml
+from dataclasses import dataclass, field
+
+from .create_packages_json import (
+    PackagesGenerator,
+    RepoInfo,
+    VariantInfo,
+)
+
+
+@dataclass
+class ExtraVariantInfo(VariantInfo):
+
+    modules: List[AnyStr] = field(default_factory=list)
+    packages: List[AnyStr] = field(default_factory=list)
+
+
+class CreateExtraRepo(PackagesGenerator):
+
+    def __init__(
+        self,
+        variants: List[ExtraVariantInfo],
+        bs_auth_token: AnyStr,
+        local_repository_path: AnyStr,
+        clear_target_repo: bool = True,
+    ):
+        self.variants = []  # type: List[ExtraVariantInfo]
+        super().__init__(variants, [], [])
+        self.auth_headers = {
+            'Authorization': f'Bearer {bs_auth_token}',
+        }
+        # modules data of modules.yaml.gz from an existing local repo
+        self.local_modules_data = []
+        self.local_repository_path = local_repository_path
+        # path to modules.yaml, which is generated by the class
+        self.default_modules_yaml_path = os.path.join(
+            local_repository_path,
+            'modules.yaml',
+        )
+        if clear_target_repo:
+            if os.path.exists(self.local_repository_path):
+                rmtree(self.local_repository_path)
+            os.makedirs(self.local_repository_path, exist_ok=True)
+        else:
+            self._read_local_modules_yaml()
+
+    def _read_local_modules_yaml(self):
+        """
+        Read modules data from an existing local repo
+        """
+        repomd_file_path = os.path.join(
+            self.local_repository_path,
+            'repodata',
+            'repomd.xml',
+        )
+        repomd_object = self._parse_repomd(repomd_file_path)
+        for repomd_record in repomd_object.records:
+            if repomd_record.type != 'modules':
+                continue
+            modules_yaml_path = os.path.join(
+                self.local_repository_path,
+                repomd_record.location_href,
+            )
+            self.local_modules_data = list(self._parse_modules_file(
+                modules_yaml_path,
+            ))
+            break
+
+    def _dump_local_modules_yaml(self):
+        """
+        Dump merged modules data to a local repo
+        """
+        if self.local_modules_data:
+            with open(self.default_modules_yaml_path, 'w') as yaml_file:
+                yaml.dump_all(
+                    self.local_modules_data,
+                    yaml_file,
+                )
+
+    @staticmethod
+    def get_repo_info_from_bs_repo(
+        auth_token: AnyStr,
+        build_id: AnyStr,
+        arch: AnyStr,
+        packages: Optional[List[AnyStr]] = None,
+        modules: Optional[List[AnyStr]] = None,
+    ) -> List[ExtraVariantInfo]:
+        """
+        Get info about a BS repo and save it to
+        an object of class ExtraVariantInfo
+        :param auth_token: Auth token for the Build System
+        :param build_id: ID of a build from BS
+        :param arch: an architecture of the repo which will be used
+        :param packages: list of names of packages which will be put into a
+               local repo from a BS repo
+        :param modules: list of names of modules which will be put into a
+               local repo from a BS repo
+        :return: list of ExtraVariantInfo with info about the BS repos
+        """
+
+        bs_url = 'https://build.cloudlinux.com'
+        api_uri = 'api/v1'
+        bs_repo_suffix = 'build_repos'
+
+        variants_info = []
+
+        # get the full info about a BS repo
+        repo_request = requests.get(
+            url=os.path.join(
+                bs_url,
+                api_uri,
+                'builds',
+                build_id,
+            ),
+            headers={
+                'Authorization': f'Bearer {auth_token}',
+            },
+        )
+        repo_request.raise_for_status()
+        result = repo_request.json()
+        for build_platform in result['build_platforms']:
+            platform_name = build_platform['name']
+            for architecture in build_platform['architectures']:
+                # skip repo with unsuitable architecture
+                if architecture != arch:
+                    continue
+                variant_info = ExtraVariantInfo(
+                    name=f'{build_id}-{platform_name}-{architecture}',
+                    arch=architecture,
+                    packages=packages,
+                    modules=modules,
+                    repos=[
+                        RepoInfo(
+                            path=os.path.join(
+                                bs_url,
+                                bs_repo_suffix,
+                                build_id,
+                                platform_name,
+                            ),
+                            folder=architecture,
+                            is_remote=True,
+                        )
+                    ]
+                )
+                variants_info.append(variant_info)
+        return variants_info
+
+    def _create_local_extra_repo(self):
+        """
+        Call `createrepo_c <path_to_repo>` for creating a local repo
+        """
+        subprocess.call(
+            f'createrepo_c {self.local_repository_path}',
+            shell=True,
+        )
+        # remove an unnecessary temporary modules.yaml
+        if os.path.exists(self.default_modules_yaml_path):
+            os.remove(self.default_modules_yaml_path)
+
+    def get_remote_file_content(
+        self,
+        file_url: AnyStr,
+    ) -> AnyStr:
+        """
+        Get content from a remote file and write it to a temp file
+        :param file_url: url of a remote file
+        :return: path to a temp file
+        """
+
+        file_request = requests.get(
+            url=file_url,
+            # for the case when we get a file from BS
+            headers=self.auth_headers,
+        )
+        file_request.raise_for_status()
+        with tempfile.NamedTemporaryFile(delete=False) as file_stream:
+            file_stream.write(file_request.content)
+            return file_stream.name
+
+    def _download_rpm_to_local_repo(
+        self,
+        package_location: AnyStr,
+        repo_info: RepoInfo,
+    ) -> None:
+        """
+        Download an rpm package from a remote repo and save it to a local repo
+        :param package_location: relative uri of a package in a remote repo
+        :param repo_info: info about a remote repo which contains a specific
+               rpm package
+        """
+        rpm_package_remote_path = os.path.join(
+            repo_info.path,
+            repo_info.folder,
+            package_location,
+        )
+        rpm_package_local_path = os.path.join(
+            self.local_repository_path,
+            os.path.basename(package_location),
+        )
+        rpm_request = requests.get(
+            url=rpm_package_remote_path,
+            headers=self.auth_headers,
+        )
+        rpm_request.raise_for_status()
+        with open(rpm_package_local_path, 'wb') as rpm_file:
+            rpm_file.write(rpm_request.content)
+
+    def _download_packages(
+        self,
+        packages: Dict[AnyStr, cr.Package],
+        variant_info: ExtraVariantInfo
+    ):
+        """
+        Download all defined packages from a remote repo
+        :param packages: information about all packages (including
+               modularity) in a remote repo
+        :param variant_info: information about a remote variant
+        """
+        for package in packages.values():
+            package_name = package.name
+            # Skip a current package from a remote repo if we defined
+            # the list of packages and a current package doesn't belong to it
+            if variant_info.packages and \
+                    package_name not in variant_info.packages:
+                continue
+            for repo_info in variant_info.repos:
+                self._download_rpm_to_local_repo(
+                    package_location=package.location_href,
+                    repo_info=repo_info,
+                )
+
+    def _download_modules(
+        self,
+        modules_data: List[Dict],
+        variant_info: ExtraVariantInfo,
+        packages: Dict[AnyStr, cr.Package]
+    ):
+        """
+        Download all defined modularity packages and their data from
+        a remote repo
+        :param modules_data: information about all modules in a remote repo
+        :param variant_info: information about a remote variant
+        :param packages: information about all packages (including
+               modularity) in a remote repo
+        """
+        for module in modules_data:
+            module_data = module['data']
+            # Skip a current module from a remote repo if we defined
+            # the list of modules and a current module doesn't belong to it
+            if variant_info.modules and \
+                    module_data['name'] not in variant_info.modules:
+                continue
+            # we should add info about a module if the local repodata
+            # doesn't have it
+            if module not in self.local_modules_data:
+                self.local_modules_data.append(module)
+            # just skip a module's record if it doesn't have an rpm artifact
+            if module['document'] != 'modulemd' or \
+                    'artifacts' not in module_data or \
+                    'rpms' not in module_data['artifacts']:
+                continue
+            for rpm in module['data']['artifacts']['rpms']:
+                # Empty repo_info.packages means that we will download
+                # all packages from the repo including
+                # the modularity packages
+                if not variant_info.packages:
+                    break
+                # skip an rpm if it doesn't belong to a processed repo
+                if rpm not in packages:
+                    continue
+                for repo_info in variant_info.repos:
+                    self._download_rpm_to_local_repo(
+                        package_location=packages[rpm].location_href,
+                        repo_info=repo_info,
+                    )
+
+    def create_extra_repo(self):
+        """
+        1. Get the specific (or all) packages/modules from the remote repos
+        2. Save them to a local repo
+        3. Save info about the modules to a local repo
+        4. Call `createrepo_c` which creates a local repo
+           with the right repodata
+        """
+        for variant_info in self.variants:
+            for repo_info in variant_info.repos:
+                repomd_records = self._get_repomd_records(
+                    repo_info=repo_info,
+                )
+                packages_iterator = self.get_packages_iterator(repo_info)
+                # parse the repodata (including modules.yaml.gz)
+                modules_data = self._parse_module_repomd_record(
+                    repo_info=repo_info,
+                    repomd_records=repomd_records,
+                )
+                # convert the packages dict to a more usable form
+                # for future checking that an rpm from the module's artifacts
+                # belongs to a processed repository
+                packages = {
+                    f'{package.name}-{package.epoch}:{package.version}-'
+                    f'{package.release}.{package.arch}':
+                        package for package in packages_iterator
+                }
+                self._download_modules(
+                    modules_data=modules_data,
+                    variant_info=variant_info,
+                    packages=packages,
+                )
+                self._download_packages(
+                    packages=packages,
+                    variant_info=variant_info,
+                )
+
+        self._dump_local_modules_yaml()
+        self._create_local_extra_repo()
+
+
+def create_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--bs-auth-token',
+        help='Auth token for Build System',
+    )
+    parser.add_argument(
+        '--local-repo-path',
+        help='Path to a local repo. E.g. /var/repo/test_repo',
+        required=True,
+    )
+    parser.add_argument(
+        '--clear-local-repo',
+        help='Clear a local repo before creating a new one',
+        action='store_true',
+        default=False,
+    )
+    parser.add_argument(
+        '--repo',
+        action='append',
+        help='Path to a folder with repofolders or a build id. E.g. '
+             '"http://koji.cloudlinux.com/mirrors/rhel_mirror" or '
+             '"601809b3c2f5b0e458b14cd3"',
+        required=True,
+    )
+    parser.add_argument(
+        '--repo-folder',
+        action='append',
+        help='A folder which contains a repodata folder. E.g. "baseos-stream"',
+        required=True,
+    )
+    parser.add_argument(
+        '--repo-arch',
+        action='append',
+        help='What architecture of packages a repository contains. E.g. "x86_64"',
+        required=True,
+    )
+    parser.add_argument(
+        '--packages',
+        action='append',
+        type=str,
+        default=[],
+        help='A list of package names which we want to download to the local '
+             'extra repo. We will download all packages if the param is empty',
+        required=True,
+    )
+    parser.add_argument(
+        '--modules',
+        action='append',
+        type=str,
+        default=[],
+        help='A list of module names which we want to download to the local '
+             'extra repo. We will download all modules if the param is empty',
+        required=True,
+    )
+
+    return parser
+
+
+def cli_main():
+    args = create_parser().parse_args()
+    repos_info = []
+    for repo, repo_folder, repo_arch, packages, modules in zip(
+            args.repo,
+            args.repo_folder,
+            args.repo_arch,
+            args.packages,
+            args.modules,
+    ):
+        modules = modules.split()
+        packages = packages.split()
+        if repo.startswith('http://'):
+            repos_info.append(
+                ExtraVariantInfo(
+                    name=repo_folder,
+                    arch=repo_arch,
+                    repos=[
+                        RepoInfo(
+                            path=repo,
+                            folder=repo_folder,
+                            is_remote=True,
+                        )
+                    ],
+                    modules=modules,
+                    packages=packages,
+                )
+            )
+        else:
+            repos_info.extend(
+                CreateExtraRepo.get_repo_info_from_bs_repo(
+                    auth_token=args.bs_auth_token,
+                    build_id=repo,
+                    arch=repo_arch,
+                    modules=modules,
+                    packages=packages,
+                )
+            )
+    cer = CreateExtraRepo(
+        variants=repos_info,
+        bs_auth_token=args.bs_auth_token,
+        local_repository_path=args.local_repo_path,
+        clear_target_repo=args.clear_local_repo,
+    )
+    cer.create_extra_repo()
+
+
+if __name__ == '__main__':
+    cli_main()
@@ -0,0 +1,514 @@
+# coding=utf-8
+"""
+The tool allows generating packages.json. This file is used by pungi
+as the parameter `gather_prepopulate`.
+Sample of using repodata files taken from
+https://github.com/rpm-software-management/createrepo_c/blob/master/examples/python/repodata_parsing.py
+"""
+
+import argparse
+import gzip
+import json
+import logging
+import lzma
+import os
+import re
+import tempfile
+from collections import defaultdict
+from itertools import tee
+from pathlib import Path
+from typing import (
+    AnyStr,
+    Dict,
+    List,
+    Any,
+    Iterator,
+    Optional,
+    Tuple,
+    Union,
+)
+
+import binascii
+from urllib.parse import urljoin
+
+import requests
+import rpm
+import yaml
+from createrepo_c import (
+    Package,
+    PackageIterator,
+    Repomd,
+    RepomdRecord,
+)
+from dataclasses import dataclass, field
+from kobo.rpmlib import parse_nvra
+
+logging.basicConfig(level=logging.INFO)
+
+
+def _is_compressed_file(first_two_bytes: bytes, initial_bytes: bytes):
+    return binascii.hexlify(first_two_bytes) == initial_bytes
+
+
+def is_gzip_file(first_two_bytes):
+    return _is_compressed_file(
+        first_two_bytes=first_two_bytes,
+        initial_bytes=b'1f8b',
+    )
+
+
+def is_xz_file(first_two_bytes):
+    return _is_compressed_file(
+        first_two_bytes=first_two_bytes,
+        initial_bytes=b'fd37',
+    )
+
+
+@dataclass
+class RepoInfo:
+    # path to a directory with repo directories. E.g. '/var/repos' contains
+    # 'appstream', 'baseos', etc.
+    # Or 'http://koji.cloudlinux.com/mirrors/rhel_mirror' if you are
+    # using a remote repo
+    path: str
+    # name of the folder with a repodata folder. E.g. 'baseos', 'appstream', etc
+    folder: str
+    # Is a repo remote or local
+    is_remote: bool
+    # Is it a reference repository (usually it's a RHEL repo)?
+    # Layout of packages from such a repository will be taken as an example.
+    # Only the layout of a specific package (which doesn't exist
+    # in a reference repository) will be taken as an example
+    is_reference: bool = False
+    # The packages from a 'present' repo will be added to a variant.
+    # The packages from an 'absent' repo will be removed from a variant.
+    repo_type: str = 'present'
+
+
+@dataclass
+class VariantInfo:
+    # name of the variant. E.g. 'BaseOS', 'AppStream', etc
+    name: AnyStr
+    # architecture of the variant. E.g. 'x86_64', 'i686', etc
+    arch: AnyStr
+    # The packages which will not be added to a variant
+    excluded_packages: List[str] = field(default_factory=list)
+    # Repos of a variant
+    repos: List[RepoInfo] = field(default_factory=list)
+
+
+class PackagesGenerator:
+
+    repo_arches = defaultdict(lambda: list(('noarch',)))
+    addon_repos = {
+        'x86_64': ['i686'],
+        'ppc64le': [],
+        'aarch64': [],
+        's390x': [],
+        'i686': [],
+    }
+
+    def __init__(
+        self,
+        variants: List[VariantInfo],
+        excluded_packages: List[AnyStr],
+        included_packages: List[AnyStr],
+    ):
+        self.variants = variants
+        self.pkgs = dict()
+        self.excluded_packages = excluded_packages
+        self.included_packages = included_packages
+        self.tmp_files = []  # type: list[Path]
+        for arch, arch_list in self.addon_repos.items():
+            self.repo_arches[arch].extend(arch_list)
+            self.repo_arches[arch].append(arch)
+
+    def __del__(self):
+        for tmp_file in self.tmp_files:
+            if tmp_file.exists():
+                tmp_file.unlink()
+
+    @staticmethod
+    def _get_full_repo_path(repo_info: RepoInfo):
+        result = os.path.join(
+            repo_info.path,
+            repo_info.folder
+        )
+        if repo_info.is_remote:
+            result = urljoin(
+                repo_info.path + '/',
+                repo_info.folder,
+            )
+        return result
+
+    @staticmethod
+    def _warning_callback(warning_type, message):
+        """
+        Warning callback for createrepo_c parsing functions
+        """
+        print(f'Warning message: "{message}"; warning type: "{warning_type}"')
+        return True
+
+    def get_remote_file_content(self, file_url: AnyStr) -> AnyStr:
+        """
+        Get content from a remote file and write it to a temp file
+        :param file_url: url of a remote file
+        :return: path to a temp file
+        """
+
+        file_request = requests.get(
+            url=file_url,
+        )
+        file_request.raise_for_status()
+        with tempfile.NamedTemporaryFile(delete=False) as file_stream:
+            file_stream.write(file_request.content)
+            self.tmp_files.append(Path(file_stream.name))
+            return file_stream.name
+
+    @staticmethod
+    def _parse_repomd(repomd_file_path: AnyStr) -> Repomd:
+        """
+        Parse file repomd.xml and create an object Repomd
+        :param repomd_file_path: path to a local repomd.xml
+        """
+        return Repomd(repomd_file_path)
+
+    @classmethod
+    def _parse_modules_file(
+        cls,
+        modules_file_path: AnyStr,
+    ) -> Iterator[Any]:
+        """
+        Parse modules.yaml.gz and return the parsed data
+        :param modules_file_path: path to a local modules.yaml.gz
+        :return: List of dicts, one for each module in a repo
+        """
+
+        with open(modules_file_path, 'rb') as modules_file:
+            data = modules_file.read()
+            if is_gzip_file(data[:2]):
+                data = gzip.decompress(data)
+            elif is_xz_file(data[:2]):
+                data = lzma.decompress(data)
+            return yaml.load_all(
+                data,
+                Loader=yaml.BaseLoader,
+            )
+
+    def _get_repomd_records(
+        self,
+        repo_info: RepoInfo,
+    ) -> List[RepomdRecord]:
+        """
+        Get and parse file repomd.xml and extract repomd records from it
+        :param repo_info: structure which contains info about a current repo
+        :return: list with repomd records
+        """
+        repomd_file_path = os.path.join(
+            repo_info.path,
+            repo_info.folder,
+            'repodata',
+            'repomd.xml',
+        )
+        if repo_info.is_remote:
+            repomd_file_path = urljoin(
+                urljoin(
+                    repo_info.path + '/',
+                    repo_info.folder
+                ) + '/',
+                'repodata/repomd.xml'
+            )
+            repomd_file_path = self.get_remote_file_content(repomd_file_path)
+
+        repomd_object = self._parse_repomd(repomd_file_path)
+        if repo_info.is_remote:
+            os.remove(repomd_file_path)
+        return repomd_object.records
+
+    def _download_repomd_records(
+        self,
+        repo_info: RepoInfo,
+        repomd_records: List[RepomdRecord],
+        repomd_records_dict: Dict[str, str],
+    ):
+        """
+        Download repomd records
+        :param repo_info: structure which contains info about a current repo
+        :param repomd_records: list with repomd records
+        :param repomd_records_dict: dict with paths to repodata files
+        """
+        for repomd_record in repomd_records:
+            if repomd_record.type not in (
+                'primary',
+                'filelists',
+                'other',
+            ):
+                continue
+            repomd_record_file_path = os.path.join(
+                repo_info.path,
+                repo_info.folder,
+                repomd_record.location_href,
+            )
+            if repo_info.is_remote:
+                repomd_record_file_path = self.get_remote_file_content(
+                    repomd_record_file_path)
+            repomd_records_dict[repomd_record.type] = repomd_record_file_path
+
+    def _parse_module_repomd_record(
+        self,
+        repo_info: RepoInfo,
+        repomd_records: List[RepomdRecord],
+    ) -> List[Dict]:
+        """
+        Download and parse the modules repomd record
+        :param repo_info: structure which contains info about a current repo
+        :param repomd_records: list with repomd records
+        """
+        for repomd_record in repomd_records:
+            if repomd_record.type != 'modules':
+                continue
+            repomd_record_file_path = os.path.join(
+                repo_info.path,
+                repo_info.folder,
+                repomd_record.location_href,
+            )
+            if repo_info.is_remote:
+                repomd_record_file_path = self.get_remote_file_content(
+                    repomd_record_file_path)
+            return list(self._parse_modules_file(
+                repomd_record_file_path,
+            ))
+        return []
+
+    @staticmethod
+    def compare_pkgs_version(package_1: Package, package_2: Package) -> int:
+        version_tuple_1 = (
+            package_1.epoch,
+            package_1.version,
+            package_1.release,
+        )
+        version_tuple_2 = (
+            package_2.epoch,
+            package_2.version,
+            package_2.release,
+        )
+        return rpm.labelCompare(version_tuple_1, version_tuple_2)
+
+    def get_packages_iterator(
+        self,
+        repo_info: RepoInfo,
+    ) -> Union[PackageIterator, Iterator]:
+        full_repo_path = self._get_full_repo_path(repo_info)
+        pkgs_iterator = self.pkgs.get(full_repo_path)
+        if pkgs_iterator is None:
+            repomd_records = self._get_repomd_records(
+                repo_info=repo_info,
+            )
+            repomd_records_dict = {}  # type: Dict[str, str]
+            self._download_repomd_records(
+                repo_info=repo_info,
+                repomd_records=repomd_records,
+                repomd_records_dict=repomd_records_dict,
+            )
+            pkgs_iterator = PackageIterator(
+                primary_path=repomd_records_dict['primary'],
+                filelists_path=repomd_records_dict['filelists'],
+                other_path=repomd_records_dict['other'],
+                warningcb=self._warning_callback,
+            )
+        pkgs_iterator, self.pkgs[full_repo_path] = tee(pkgs_iterator)
+        return pkgs_iterator
+
+    def get_package_arch(
+        self,
+        package: Package,
+        variant_arch: str,
+    ) -> str:
+        result = variant_arch
+        if package.arch in self.repo_arches[variant_arch]:
+            result = package.arch
+        return result
+
+    def is_skipped_module_package(
+        self,
+        package: Package,
+        variant_arch: str,
+    ) -> bool:
+        package_key = self.get_package_key(package, variant_arch)
+        # Even a module package will be added to packages.json if
+        # it is present in the list of included packages
+        return 'module' in package.release and not any(
+            re.search(
+                f'^{included_pkg}$',
+                package_key,
+            ) or included_pkg in (package.name, package_key)
+            for included_pkg in self.included_packages
+        )
+
+    def is_excluded_package(
+        self,
+        package: Package,
+        variant_arch: str,
+        excluded_packages: List[str],
+    ) -> bool:
+        package_key = self.get_package_key(package, variant_arch)
+        return any(
+            re.search(
+                f'^{excluded_pkg}$',
+                package_key,
+            ) or excluded_pkg in (package.name, package_key)
+            for excluded_pkg in excluded_packages
+        )
+
+    @staticmethod
+    def get_source_rpm_name(package: Package) -> str:
+        source_rpm_nvra = parse_nvra(package.rpm_sourcerpm)
+        return source_rpm_nvra['name']
+
+    def get_package_key(self, package: Package, variant_arch: str) -> str:
+        return (
+            f'{package.name}.'
+            f'{self.get_package_arch(package, variant_arch)}'
+        )
+
+    def generate_packages_json(
+            self
+    ) -> Dict[AnyStr, Dict[AnyStr, Dict[AnyStr, List[AnyStr]]]]:
+        """
+        Generate packages.json
+        """
+        packages = defaultdict(lambda: defaultdict(lambda: {
+            'variants': list(),
+        }))
+        for variant_info in self.variants:
+            for repo_info in variant_info.repos:
+                is_reference = repo_info.is_reference
+                for package in self.get_packages_iterator(repo_info=repo_info):
+                    if self.is_skipped_module_package(
+                        package=package,
+                        variant_arch=variant_info.arch,
+                    ):
+                        continue
+                    if self.is_excluded_package(
+                        package=package,
+                        variant_arch=variant_info.arch,
+                        excluded_packages=self.excluded_packages,
+                    ):
+                        continue
+                    if self.is_excluded_package(
+                        package=package,
+                        variant_arch=variant_info.arch,
+                        excluded_packages=variant_info.excluded_packages,
+                    ):
+                        continue
+                    package_key = self.get_package_key(
+                        package,
+                        variant_info.arch,
+                    )
+                    source_rpm_name = self.get_source_rpm_name(package)
+                    package_info = packages[source_rpm_name][package_key]
+                    if 'is_reference' not in package_info:
+                        package_info['variants'].append(variant_info.name)
+                        package_info['is_reference'] = is_reference
+                        package_info['package'] = package
+                    elif not package_info['is_reference'] or \
+                            package_info['is_reference'] == is_reference and \
+                            self.compare_pkgs_version(
+                                package_1=package,
+                                package_2=package_info['package'],
+                            ) > 0:
+                        package_info['variants'] = [variant_info.name]
+                        package_info['is_reference'] = is_reference
+                        package_info['package'] = package
+                    elif self.compare_pkgs_version(
+                        package_1=package,
+                        package_2=package_info['package'],
+                    ) == 0 and repo_info.repo_type != 'absent':
+                        package_info['variants'].append(variant_info.name)
+        result = defaultdict(lambda: defaultdict(
+            lambda: defaultdict(list),
+        ))
+        for variant_info in self.variants:
+            for source_rpm_name, packages_info in packages.items():
+                for package_key, package_info in packages_info.items():
+                    variant_pkgs = result[variant_info.name][variant_info.arch]
+                    if variant_info.name not in package_info['variants']:
+                        continue
+                    variant_pkgs[source_rpm_name].append(package_key)
+        return result
+
+
+def create_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '-c',
+        '--config',
+        type=Path,
+        default=Path('config.yaml'),
+        required=False,
+        help='Path to a config',
+    )
+    parser.add_argument(
+        '-o',
+        '--json-output-path',
+        type=str,
+        help='Full path to the output json file',
+        required=True,
+    )
+
+    return parser
+
+
+def read_config(config_path: Path) -> Optional[Dict]:
+    if not config_path.exists():
+        logging.error('A config by path "%s" does not exist', config_path)
+        exit(1)
+    with config_path.open('r') as config_fd:
+        return yaml.safe_load(config_fd)
+
+
+def process_config(config_data: Dict) -> Tuple[
+    List[VariantInfo],
+    List[str],
+    List[str],
+]:
+    excluded_packages = config_data.get('excluded_packages', [])
+    included_packages = config_data.get('included_packages', [])
+    variants = [VariantInfo(
+        name=variant_name,
+        arch=variant_info['arch'],
+        excluded_packages=variant_info.get('excluded_packages', []),
+        repos=[RepoInfo(
+            path=variant_repo['path'],
+            folder=variant_repo['folder'],
+            is_remote=variant_repo['remote'],
+            is_reference=variant_repo['reference'],
+            repo_type=variant_repo.get('repo_type', 'present'),
+        ) for variant_repo in variant_info['repos']]
+    ) for variant_name, variant_info in config_data['variants'].items()]
+    return variants, excluded_packages, included_packages
+
+
+def cli_main():
+    args = create_parser().parse_args()
+    variants, excluded_packages, included_packages = process_config(
+        config_data=read_config(args.config)
+    )
+    pg = PackagesGenerator(
+        variants=variants,
+        excluded_packages=excluded_packages,
+        included_packages=included_packages,
+    )
+    result = pg.generate_packages_json()
+    with open(args.json_output_path, 'w') as packages_file:
+        json.dump(
+            result,
+            packages_file,
+            indent=4,
+            sort_keys=True,
+        )
+
+
+if __name__ == '__main__':
+    cli_main()
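`compare_pkgs_version` above defers to `rpm.labelCompare`, which takes two `(epoch, version, release)` tuples and returns -1, 0, or 1 using RPM's version-segment rules rather than plain string comparison:

    import rpm

    print(rpm.labelCompare(("0", "1.0", "1"), ("0", "1.0", "2")))   # -1: older release
    print(rpm.labelCompare(("0", "9.0", "1"), ("0", "10.0", "1")))  # -1: 10 > 9 numerically
    print(rpm.labelCompare(("1", "1.0", "1"), ("0", "9.9", "9")))   # 1: higher epoch wins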
@@ -14,6 +14,9 @@ def send(cmd, data):
     topic = "compose.%s" % cmd.replace("-", ".").lower()
     try:
         msg = fedora_messaging.api.Message(topic="pungi.{}".format(topic), body=data)
+        if cmd == "ostree":
+            # https://pagure.io/fedora-infrastructure/issue/10899
+            msg.priority = 3
         fedora_messaging.api.publish(msg)
     except fedora_messaging.exceptions.PublishReturned as e:
         print("Fedora Messaging broker rejected message %s: %s" % (msg.id, e))
@@ -0,0 +1,255 @@
+import gzip
+import lzma
+import os
+from argparse import ArgumentParser, FileType
+from glob import iglob
+from io import BytesIO
+from pathlib import Path
+from typing import List, AnyStr, Iterable, Union, Optional
+import logging
+from urllib.parse import urljoin
+
+import yaml
+import createrepo_c as cr
+from typing.io import BinaryIO
+
+from .create_packages_json import PackagesGenerator, is_gzip_file, is_xz_file
+
+EMPTY_FILE = '.empty'
+
+
+def read_modules_yaml(modules_yaml_path: Union[str, Path]) -> BytesIO:
+    with open(modules_yaml_path, 'rb') as fp:
+        return BytesIO(fp.read())
+
+
+def grep_list_of_modules_yaml(repos_path: AnyStr) -> Iterable[BytesIO]:
+    """
+    Find all valid *modules.yaml.gz files in the repos
+    :param repos_path: path to a directory which contains repo dirs
+    :return: iterable object of content from *modules.yaml.*
+    """
+
+    return (
+        read_modules_yaml_from_specific_repo(repo_path=Path(path).parent)
+        for path in iglob(
+            str(Path(repos_path).joinpath('**/repodata')),
+            recursive=True
+        )
+    )
+
+
+def _is_remote(path: str):
+    return any(str(path).startswith(protocol)
+               for protocol in ('http', 'https'))
+
+
+def read_modules_yaml_from_specific_repo(
+        repo_path: Union[str, Path]
+) -> Optional[BytesIO]:
+    """
+    Read modules_yaml from a specific repo (remote or local)
+    :param repo_path: path/url to a specific repo
+           (the final dir should contain dir `repodata`)
+    :return: iterable object of content from *modules.yaml.*
+    """
+
+    if _is_remote(repo_path):
+        repomd_url = urljoin(
+            repo_path + '/',
+            'repodata/repomd.xml',
+        )
+        packages_generator = PackagesGenerator(
+            variants=[],
+            excluded_packages=[],
+            included_packages=[],
+        )
+        repomd_file_path = packages_generator.get_remote_file_content(
+            file_url=repomd_url
+        )
+    else:
+        repomd_file_path = os.path.join(
+            repo_path,
+            'repodata/repomd.xml',
+        )
+    repomd_obj = cr.Repomd(str(repomd_file_path))
+    for record in repomd_obj.records:
+        if record.type != 'modules':
+            continue
+        else:
+            if _is_remote(repo_path):
+                modules_yaml_url = urljoin(
+                    repo_path + '/',
+                    record.location_href,
+                )
+                packages_generator = PackagesGenerator(
+                    variants=[],
+                    excluded_packages=[],
+                    included_packages=[],
+                )
+                modules_yaml_path = packages_generator.get_remote_file_content(
+                    file_url=modules_yaml_url
+                )
+            else:
+                modules_yaml_path = os.path.join(
+                    repo_path,
+                    record.location_href,
+                )
+            return read_modules_yaml(modules_yaml_path=modules_yaml_path)
+    else:
+        return None
+
+
+def _should_grep_defaults(
+        document_type: str,
+        grep_only_modules_data: bool = False,
+        grep_only_modules_defaults_data: bool = False,
+) -> bool:
+    xor_flag = grep_only_modules_data == grep_only_modules_defaults_data
+    if document_type == 'modulemd' and (xor_flag or grep_only_modules_data):
+        return True
+    return False
+
+
+def _should_grep_modules(
+        document_type: str,
+        grep_only_modules_data: bool = False,
+        grep_only_modules_defaults_data: bool = False,
+) -> bool:
+    xor_flag = grep_only_modules_data == grep_only_modules_defaults_data
+    if document_type == 'modulemd-defaults' and \
+            (xor_flag or grep_only_modules_defaults_data):
+        return True
+    return False
+
+
+def collect_modules(
+        modules_paths: List[BinaryIO],
+        target_dir: str,
+        grep_only_modules_data: bool = False,
+        grep_only_modules_defaults_data: bool = False,
+):
+    """
+    Read the given modules.yaml.gz files and export modules
+    and modulemd files from them.
+    Returns:
+        object:
+    """
+    xor_flag = grep_only_modules_defaults_data is grep_only_modules_data
+    modules_path = os.path.join(target_dir, 'modules')
+    module_defaults_path = os.path.join(target_dir, 'module_defaults')
+    if grep_only_modules_data or xor_flag:
+        os.makedirs(modules_path, exist_ok=True)
+    if grep_only_modules_defaults_data or xor_flag:
+        os.makedirs(module_defaults_path, exist_ok=True)
+        # Defaults modules can be empty, but pungi detects
+        # an empty folder while copying and raises an exception in this case
+        Path(os.path.join(module_defaults_path, EMPTY_FILE)).touch()
+
+    for module_file in modules_paths:
+        data = module_file.read()
+        if is_gzip_file(data[:2]):
+            data = gzip.decompress(data)
+        elif is_xz_file(data[:2]):
+            data = lzma.decompress(data)
+        documents = yaml.load_all(data, Loader=yaml.BaseLoader)
+        for doc in documents:
+            path = None
+            if _should_grep_modules(
+                    doc['document'],
+                    grep_only_modules_data,
+                    grep_only_modules_defaults_data,
+            ):
+                name = f"{doc['data']['module']}.yaml"
+                path = os.path.join(module_defaults_path, name)
+                logging.info('Found %s module defaults', name)
+            elif _should_grep_defaults(
+                    doc['document'],
+                    grep_only_modules_data,
|
||||||
|
grep_only_modules_defaults_data,
|
||||||
|
):
|
||||||
|
# pungi.phases.pkgset.sources.source_koji.get_koji_modules
|
||||||
|
stream = doc['data']['stream'].replace('-', '_')
|
||||||
|
doc_data = doc['data']
|
||||||
|
name = f"{doc_data['name']}-{stream}-" \
|
||||||
|
f"{doc_data['version']}.{doc_data['context']}"
|
||||||
|
arch_dir = os.path.join(
|
||||||
|
modules_path,
|
||||||
|
doc_data['arch']
|
||||||
|
)
|
||||||
|
os.makedirs(arch_dir, exist_ok=True)
|
||||||
|
path = os.path.join(
|
||||||
|
arch_dir,
|
||||||
|
name,
|
||||||
|
)
|
||||||
|
logging.info('Found module %s', name)
|
||||||
|
|
||||||
|
if 'artifacts' not in doc['data']:
|
||||||
|
logging.warning(
|
||||||
|
'RPM %s does not have explicit list of artifacts',
|
||||||
|
name
|
||||||
|
)
|
||||||
|
if path is not None:
|
||||||
|
with open(path, 'w') as f:
|
||||||
|
yaml.dump(doc, f, default_flow_style=False)
|
||||||
|
|
||||||
|
|
||||||
|
def cli_main():
|
||||||
|
parser = ArgumentParser()
|
||||||
|
content_type_group = parser.add_mutually_exclusive_group(required=False)
|
||||||
|
content_type_group.add_argument(
|
||||||
|
'--get-only-modules-data',
|
||||||
|
action='store_true',
|
||||||
|
help='Parse and get only modules data',
|
||||||
|
)
|
||||||
|
content_type_group.add_argument(
|
||||||
|
'--get-only-modules-defaults-data',
|
||||||
|
action='store_true',
|
||||||
|
help='Parse and get only modules_defaults data',
|
||||||
|
)
|
||||||
|
path_group = parser.add_mutually_exclusive_group(required=True)
|
||||||
|
path_group.add_argument(
|
||||||
|
'-p', '--path',
|
||||||
|
type=FileType('rb'), nargs='+',
|
||||||
|
help='Path to modules.yaml.gz file. '
|
||||||
|
'You may pass multiple files by passing -p path1 path2'
|
||||||
|
)
|
||||||
|
path_group.add_argument(
|
||||||
|
'-rp', '--repo-path',
|
||||||
|
required=False,
|
||||||
|
type=str,
|
||||||
|
default=None,
|
||||||
|
help='Path to a directory which contains repodirs. E.g. /var/repos'
|
||||||
|
)
|
||||||
|
path_group.add_argument(
|
||||||
|
'-rd', '--repodata-paths',
|
||||||
|
required=False,
|
||||||
|
type=str,
|
||||||
|
nargs='+',
|
||||||
|
default=[],
|
||||||
|
help='Paths/urls to the directories with directory `repodata`',
|
||||||
|
)
|
||||||
|
parser.add_argument('-t', '--target', required=True)
|
||||||
|
|
||||||
|
namespace = parser.parse_args()
|
||||||
|
if namespace.repodata_paths:
|
||||||
|
modules = []
|
||||||
|
for repodata_path in namespace.repodata_paths:
|
||||||
|
modules.append(read_modules_yaml_from_specific_repo(
|
||||||
|
repodata_path,
|
||||||
|
))
|
||||||
|
elif namespace.path is not None:
|
||||||
|
modules = namespace.path
|
||||||
|
else:
|
||||||
|
modules = grep_list_of_modules_yaml(namespace.repo_path)
|
||||||
|
modules = list(filter(lambda i: i is not None, modules))
|
||||||
|
collect_modules(
|
||||||
|
modules,
|
||||||
|
namespace.target,
|
||||||
|
namespace.get_only_modules_data,
|
||||||
|
namespace.get_only_modules_defaults_data,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
cli_main()
|
|
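A minimal usage sketch for the module-gathering helpers above, assuming a locally synced repo; the paths are hypothetical:

    from io import BytesIO

    # Read one modules.yaml.gz by hand instead of going through cli_main().
    with open('/var/repos/AppStream/repodata/modules.yaml.gz', 'rb') as fp:
        stream = BytesIO(fp.read())

    # With neither --get-only-* flag set, the xor_flag branch exports both
    # kinds of documents:
    #   /tmp/koji/modules/<arch>/<name>-<stream>-<version>.<context>
    #   /tmp/koji/module_defaults/<module>.yaml
    collect_modules([stream], '/tmp/koji', False, False)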
@@ -0,0 +1,96 @@
import re
from argparse import ArgumentParser

import os
from glob import iglob
from typing import List
from pathlib import Path

from dataclasses import dataclass
from productmd.common import parse_nvra


@dataclass
class Package:
    nvra: dict
    path: Path


def search_rpms(top_dir: Path) -> List[Package]:
    """
    Search for all *.rpm files recursively
    in given top directory
    Returns:
        list: list of Package objects
    """
    return [Package(
        nvra=parse_nvra(Path(path).stem),
        path=Path(path),
    ) for path in iglob(str(top_dir.joinpath('**/*.rpm')), recursive=True)]


def is_excluded_package(
        package: Package,
        excluded_packages: List[str],
) -> bool:
    package_key = f'{package.nvra["name"]}.{package.nvra["arch"]}'
    return any(
        re.search(
            f'^{excluded_pkg}$',
            package_key,
        ) or excluded_pkg in (package.nvra['name'], package_key)
        for excluded_pkg in excluded_packages
    )


def copy_rpms(
        packages: List[Package],
        target_top_dir: Path,
        excluded_packages: List[str],
):
    """
    Search synced repos for rpms and prepare
    a koji-like structure for pungi.

    Instead of repos, use the following structure:
    # ls /mnt/koji/
    i686/  noarch/  x86_64/
    """
    for package in packages:
        if is_excluded_package(package, excluded_packages):
            continue
        target_arch_dir = target_top_dir.joinpath(package.nvra['arch'])
        target_file = target_arch_dir.joinpath(package.path.name)
        os.makedirs(target_arch_dir, exist_ok=True)

        if not target_file.exists():
            try:
                os.link(package.path, target_file)
            except OSError:
                # Hardlinking failed (e.g. across filesystems); fall back to
                # creating a symlink at the target location.
                target_file.symlink_to(package.path)


def cli_main():
    parser = ArgumentParser()
    parser.add_argument('-p', '--path', required=True, type=Path)
    parser.add_argument('-t', '--target', required=True, type=Path)
    parser.add_argument(
        '-e',
        '--excluded-packages',
        required=False,
        nargs='+',
        type=str,
        default=[],
    )

    namespace = parser.parse_args()

    rpms = search_rpms(namespace.path)
    copy_rpms(rpms, namespace.target, namespace.excluded_packages)


if __name__ == '__main__':
    cli_main()
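The exclusion check above tries each entry both as an anchored regular expression against name.arch and as a literal name or name.arch; a small sketch with hypothetical package names:

    pkg = Package(
        nvra=parse_nvra('glibc-2.28-225.el8.x86_64'),  # hypothetical NVRA
        path=Path('glibc-2.28-225.el8.x86_64.rpm'),
    )
    assert is_excluded_package(pkg, ['glibc'])            # literal name match
    assert is_excluded_package(pkg, ['glibc.x86_64'])     # name.arch match
    assert is_excluded_package(pkg, [r'glibc\..*'])       # regex on name.arch
    assert not is_excluded_package(pkg, ['glibc-common'])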
@@ -319,7 +319,6 @@ def get_arguments(config):


 def main():
-
     config = pungi.config.Config()
     opts = get_arguments(config)

@@ -23,6 +23,7 @@ from pungi.phases import PHASES_NAMES
 from pungi import get_full_version, util
 from pungi.errors import UnsignedPackagesError
 from pungi.wrappers import kojiwrapper
+from pungi.util import rmtree


 # force C locales
@@ -251,9 +252,15 @@ def main():
     kobo.log.add_stderr_logger(logger)

     conf = util.load_config(opts.config)

     compose_type = opts.compose_type or conf.get("compose_type", "production")
-    if compose_type == "production" and not opts.label and not opts.no_label:
+    label = opts.label or conf.get("label")
+    if label:
+        try:
+            productmd.composeinfo.verify_label(label)
+        except ValueError as ex:
+            abort(str(ex))
+
+    if compose_type == "production" and not label and not opts.no_label:
         abort("must specify label for a production compose")

     if (
@@ -300,7 +307,12 @@ def main():

     if opts.target_dir:
         compose_dir = Compose.get_compose_dir(
-            opts.target_dir, conf, compose_type=compose_type, compose_label=opts.label
+            opts.target_dir,
+            conf,
+            compose_type=compose_type,
+            compose_label=label,
+            parent_compose_ids=opts.parent_compose_id,
+            respin_of=opts.respin_of,
         )
     else:
         compose_dir = opts.compose_dir
@@ -309,7 +321,7 @@ def main():
     ci = Compose.get_compose_info(
         conf,
         compose_type=compose_type,
-        compose_label=opts.label,
+        compose_label=label,
         parent_compose_ids=opts.parent_compose_id,
         respin_of=opts.respin_of,
     )
@@ -380,6 +392,14 @@ def run_compose(
     compose.log_info("Current timezone offset: %s" % pungi.util.get_tz_offset())
     compose.log_info("COMPOSE_ID=%s" % compose.compose_id)

+    installed_pkgs_log = compose.paths.log.log_file("global", "installed-pkgs")
+    compose.log_info("Logging installed packages to %s" % installed_pkgs_log)
+    try:
+        with open(installed_pkgs_log, "w") as f:
+            subprocess.Popen(["rpm", "-qa"], stdout=f)
+    except Exception as e:
+        compose.log_warning("Failed to log installed packages: %s" % str(e))
+
     compose.read_variants()

     # dump the config file
@@ -671,7 +691,7 @@ def cli_main():
     except (Exception, KeyboardInterrupt) as ex:
         if COMPOSE:
             COMPOSE.log_error("Compose run failed: %s" % ex)
-            COMPOSE.traceback()
+            COMPOSE.traceback(show_locals=getattr(ex, "show_locals", True))
             COMPOSE.log_critical("Compose failed: %s" % COMPOSE.topdir)
             COMPOSE.write_status("DOOMED")
         else:
@@ -680,3 +700,8 @@ def cli_main():
         sys.stdout.flush()
         sys.stderr.flush()
         sys.exit(1)
+    finally:
+        # Remove repositories cloned during ExtraFiles phase
+        process_id = os.getpid()
+        directory_to_remove = "/tmp/pungi-temp-git-repos-" + str(process_id) + "/"
+        rmtree(directory_to_remove)
@@ -279,7 +279,7 @@ class GitUrlResolveError(RuntimeError):
     pass


-def resolve_git_ref(repourl, ref):
+def resolve_git_ref(repourl, ref, credential_helper=None):
     """Resolve a reference in a Git repo to a commit.

     Raises RuntimeError if there was an error. Most likely cause is failure to
@@ -289,7 +289,7 @@ def resolve_git_ref(repourl, ref):
         # This looks like a commit ID already.
         return ref
     try:
-        _, output = git_ls_remote(repourl, ref)
+        _, output = git_ls_remote(repourl, ref, credential_helper)
     except RuntimeError as e:
         raise GitUrlResolveError(
             "ref does not exist in remote repo %s with the error %s %s"
@@ -316,7 +316,7 @@ def resolve_git_ref(repourl, ref):
     return lines[0].split()[0]


-def resolve_git_url(url):
+def resolve_git_url(url, credential_helper=None):
     """Given a url to a Git repo specifying HEAD or origin/<branch> as a ref,
     replace that specifier with actual SHA1 of the commit.
@@ -335,7 +335,7 @@ def resolve_git_url(url):
     scheme = r.scheme.replace("git+", "")

     baseurl = urllib.parse.urlunsplit((scheme, r.netloc, r.path, "", ""))
-    fragment = resolve_git_ref(baseurl, ref)
+    fragment = resolve_git_ref(baseurl, ref, credential_helper)

     result = urllib.parse.urlunsplit((r.scheme, r.netloc, r.path, r.query, fragment))
     if "?#" in url:
@@ -354,13 +354,18 @@ class GitUrlResolver(object):
         self.offline = offline
         self.cache = {}

-    def __call__(self, url, branch=None):
+    def __call__(self, url, branch=None, options=None):
+        credential_helper = options.get("credential_helper") if options else None
         if self.offline:
             return branch or url
         key = (url, branch)
         if key not in self.cache:
             try:
-                res = resolve_git_ref(url, branch) if branch else resolve_git_url(url)
+                res = (
+                    resolve_git_ref(url, branch, credential_helper)
+                    if branch
+                    else resolve_git_url(url, credential_helper)
+                )
                 self.cache[key] = res
             except GitUrlResolveError as exc:
                 self.cache[key] = exc
@@ -456,6 +461,9 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
         if not variant_uid and "%(variant)s" in i:
             continue
         try:
+            # fmt: off
+            # Black wants to add a comma after kwargs, but that's not valid in
+            # Python 2.7
             args = get_format_substs(
                 compose,
                 variant=variant_uid,
@@ -467,6 +475,7 @@ def get_volid(compose, arch, variant=None, disc_type=False, formats=None, **kwar
                 base_product_version=base_product_version,
                 **kwargs
             )
+            # fmt: on
             volid = (i % args).format(**args)
         except KeyError as err:
             raise RuntimeError(
@@ -991,8 +1000,12 @@ def retry(timeout=120, interval=30, wait_on=Exception):


 @retry(wait_on=RuntimeError)
-def git_ls_remote(baseurl, ref):
-    return run(["git", "ls-remote", baseurl, ref], universal_newlines=True)
+def git_ls_remote(baseurl, ref, credential_helper=None):
+    cmd = ["git"]
+    if credential_helper:
+        cmd.extend(["-c", "credential.useHttpPath=true"])
+        cmd.extend(["-c", "credential.helper=%s" % credential_helper])
+    return run(cmd + ["ls-remote", baseurl, ref], universal_newlines=True)


 def get_tz_offset():
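With a helper configured, git_ls_remote would now run a command along these lines; the helper value is hypothetical:

    # Resulting command:
    #   git -c credential.useHttpPath=true \
    #       -c 'credential.helper=!/usr/libexec/my-helper' \
    #       ls-remote https://example.com/repo.git HEAD
    _, output = git_ls_remote(
        "https://example.com/repo.git", "HEAD",
        credential_helper="!/usr/libexec/my-helper",  # hypothetical helper
    )

Setting credential.useHttpPath=true makes git pass the full request path to the helper, so one helper can serve per-repository credentials.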
@@ -1137,3 +1150,16 @@ def read_json_file(file_path):
     """A helper function to read a JSON file."""
     with open(file_path) as f:
         return json.load(f)
+
+
+UNITS = ["", "Ki", "Mi", "Gi", "Ti"]
+
+
+def format_size(sz):
+    sz = float(sz)
+    unit = 0
+    while sz > 1024:
+        sz /= 1024
+        unit += 1
+
+    return "%.3g %sB" % (sz, UNITS[unit])
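A few sample values for the helper above (binary units, three significant digits):

    assert format_size(10) == "10 B"
    assert format_size(5000) == "4.88 KiB"
    assert format_size(123456789) == "118 MiB"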
@@ -183,15 +183,16 @@ class CompsFilter(object):
         """
         all_groups = self.tree.xpath("/comps/group/id/text()") + lookaside_groups
         for environment in self.tree.xpath("/comps/environment"):
-            for group in environment.xpath("grouplist/groupid"):
-                if group.text not in all_groups:
-                    group.getparent().remove(group)
-
-            for group in environment.xpath("grouplist/groupid[@arch]"):
-                value = group.attrib.get("arch")
-                values = [v for v in re.split(r"[, ]+", value) if v]
-                if arch not in values:
-                    group.getparent().remove(group)
+            for parent_tag in ("grouplist", "optionlist"):
+                for group in environment.xpath("%s/groupid" % parent_tag):
+                    if group.text not in all_groups:
+                        group.getparent().remove(group)
+
+                for group in environment.xpath("%s/groupid[@arch]" % parent_tag):
+                    value = group.attrib.get("arch")
+                    values = [v for v in re.split(r"[, ]+", value) if v]
+                    if arch not in values:
+                        group.getparent().remove(group)

     def remove_empty_environments(self):
         """
@@ -305,6 +306,8 @@ class CompsWrapper(object):
             append_common_info(doc, group_node, group, force_description=True)
             append_bool(doc, group_node, "default", group.default)
             append_bool(doc, group_node, "uservisible", group.uservisible)
+            if group.display_order is not None:
+                append(doc, group_node, "display_order", str(group.display_order))

             if group.lang_only:
                 append(doc, group_node, "langonly", group.lang_only)
@@ -88,5 +88,12 @@ def parse_output(output):
             packages.add((name, arch, frozenset(flags)))
         else:
             name, arch = nevra.rsplit(".", 1)
-            modules.add(name.split(":", 1)[1])
+            # Replace dashes with underscores in the stream part of the
+            # module's NEVRA. The source value looks like
+            # module:llvm-toolset:rhel8:8040020210411062713:9f9e2e7e.x86_64
+            name = ':'.join(
+                item.replace('-', '_') if i == 1 else item for
+                i, item in enumerate(name.split(':')[1:])
+            )
+            modules.add(name)
     return packages, modules
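Worked through on a hypothetical modular NEVRA with a dash in the stream, which is the case this rewrite exists for:

    nevra = "module:mymod:my-stream:20210411:9f9e2e7e.x86_64"  # hypothetical
    name, arch = nevra.rsplit(".", 1)
    name = ':'.join(
        item.replace('-', '_') if i == 1 else item
        for i, item in enumerate(name.split(':')[1:])
    )
    # Only the stream (index 1) is rewritten; a dash in the module name
    # itself, e.g. llvm-toolset, is left alone.
    assert name == "mymod:my_stream:20210411:9f9e2e7e"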
@@ -260,20 +260,23 @@ def get_isohybrid_cmd(iso_path, arch):
     return cmd


-def get_manifest_cmd(iso_name, xorriso=False):
+def get_manifest_cmd(iso_name, xorriso=False, output_file=None):
+    if not output_file:
+        output_file = "%s.manifest" % iso_name
+
     if xorriso:
         return """xorriso -dev %s --find |
         tail -n+2 |
         tr -d "'" |
         cut -c2- |
-        sort >> %s.manifest""" % (
+        sort >> %s""" % (
             shlex_quote(iso_name),
-            shlex_quote(iso_name),
+            shlex_quote(output_file),
         )
     else:
-        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s.manifest" % (
+        return "isoinfo -R -f -i %s | grep -v '/TRANS.TBL$' | sort >> %s" % (
             shlex_quote(iso_name),
-            shlex_quote(iso_name),
+            shlex_quote(output_file),
         )

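For instance, with the default output file the isoinfo branch still produces the same command as before:

    print(get_manifest_cmd("Fedora.iso"))
    # isoinfo -R -f -i Fedora.iso | grep -v '/TRANS.TBL$' | sort >> Fedora.iso.manifest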
@@ -0,0 +1,299 @@
import os
import time
from pathlib import Path

from attr import dataclass
from kobo.rpmlib import parse_nvra

from pungi.module_util import Modulemd
from pungi.scripts.gather_rpms import search_rpms

# Just a random value that we don't currently use in the mock.
# Originally builds are filtered by this value
# to get a consistent snapshot of tags and packages.
LAST_EVENT_ID = 999999
# The last event time is not important, but build
# time should be less than it.
LAST_EVENT_TIME = time.time()
BUILD_TIME = 0
# Virtual build that collects all
# packages built for some arch.
RELEASE_BUILD_ID = 15270
# Tag that should have all packages available.
ALL_PACKAGES_TAG = 'dist-c8-compose'
# Tag that should have all modules available.
ALL_MODULES_TAG = 'dist-c8-module-compose'


@dataclass
class Module:
    build_id: int
    name: str
    nvr: str
    stream: str
    version: str
    context: str
    arch: str


class KojiMock:
    """
    Class that acts like real koji (for some needed methods)
    but uses local storage as data source
    """
    def __init__(self, packages_dir, modules_dir, all_arches):
        self._modules = self._gather_modules(modules_dir)
        self._modules_dir = modules_dir
        self._packages_dir = packages_dir
        self._all_arches = all_arches

    @staticmethod
    def _gather_modules(modules_dir):
        modules = {}
        for index, (f, arch) in enumerate(
                (sub_path.name, sub_path.parent.name)
                for path in Path(modules_dir).glob('*')
                for sub_path in path.iterdir()
        ):
            parsed = parse_nvra(f)
            modules[index] = Module(
                name=parsed['name'],
                nvr=f,
                version=parsed['release'],
                context=parsed['arch'],
                stream=parsed['version'],
                build_id=index,
                arch=arch,
            )
        return modules

    @staticmethod
    def getLastEvent(*args, **kwargs):
        return {'id': LAST_EVENT_ID, 'ts': LAST_EVENT_TIME}

    def listTagged(self, tag_name, *args, **kwargs):
        """
        Returns a list of virtual 'builds' that contain packages with the
        given tag. There are two kinds of tags: modular and distributive.
        For now, only one kind, the distributive one, is needed.
        """
        if tag_name != ALL_MODULES_TAG:
            raise ValueError("I don't know what tag is %s" % tag_name)

        builds = []
        for module in self._modules.values():
            builds.append({
                'build_id': module.build_id,
                'owner_name': 'centos',
                'package_name': module.name,
                'nvr': module.nvr,
                'version': module.stream,
                'release': '%s.%s' % (module.version, module.context),
                'name': module.name,
                'id': module.build_id,
                'tag_name': tag_name,
                'arch': module.arch,
                # Following fields are currently not
                # used but returned by real koji;
                # left here just for reference
                #
                # 'task_id': None,
                # 'state': 1,
                # 'start_time': '2020-12-23 16:43:59',
                # 'creation_event_id': 309485,
                # 'creation_time': '2020-12-23 17:05:33.553748',
                # 'epoch': None, 'tag_id': 533,
                # 'completion_time': '2020-12-23 17:05:23',
                # 'volume_id': 0,
                # 'package_id': 3221,
                # 'owner_id': 11,
                # 'volume_name': 'DEFAULT',
            })

        return builds

    @staticmethod
    def getFullInheritance(*args, **kwargs):
        """
        Unneeded because we use local storage.
        """
        return []

    def getBuild(self, build_id, *args, **kwargs):
        """
        Used to get information about a build
        (used in pungi only for modules currently)
        """
        module = self._modules[build_id]

        result = {
            'id': build_id,
            'name': module.name,
            'version': module.stream,
            'release': '%s.%s' % (module.version, module.context),
            'completion_ts': BUILD_TIME,
            'state': 'COMPLETE',
            'arch': module.arch,
            'extra': {
                'typeinfo': {
                    'module': {
                        'stream': module.stream,
                        'version': module.version,
                        'name': module.name,
                        'context': module.context,
                        'content_koji_tag': '-'.join([
                            module.name,
                            module.stream,
                            module.version
                        ]) + '.' + module.context
                    }
                }
            }
        }
        return result

    def listArchives(self, build_id, *args, **kwargs):
        """
        Originally lists artifacts for a build, but in pungi used
        only to get the list of modulemd files for some module
        """
        module = self._modules[build_id]

        return [
            {
                'build_id': module.build_id,
                'filename': f'modulemd.{module.arch}.txt',
                'btype': 'module'
            },
            # No one ever uses this file, but it has to be there
            # because pungi ignores builds with len(files) <= 1
            {
                'build_id': module.build_id,
                'filename': 'modulemd.txt',
                'btype': 'module'
            }
        ]

    def listTaggedRPMS(self, tag_name, *args, **kwargs):
        """
        Get information about packages that are tagged by tag.
        There are two kinds of tags: per-module and per-distr.
        """
        if tag_name == ALL_PACKAGES_TAG:
            builds, packages = self._get_release_packages()
        else:
            builds, packages = self._get_module_packages(tag_name)
        return [
            packages,
            builds
        ]

    def _get_release_packages(self):
        """
        Search packages dir and keep only
        packages that are non-modular.

        This is roughly how real koji works:
        - modular packages are tagged by module-* tag
        - all other packages are tagged with dist* tag
        """
        packages = []

        # get all rpms in folder
        rpms = search_rpms(Path(self._packages_dir))

        for rpm in rpms:
            info = parse_nvra(rpm.path.stem)
            if 'module' in info['release']:
                continue
            packages.append({
                "build_id": RELEASE_BUILD_ID,
                "name": info['name'],
                "extra": None,
                "arch": info['arch'],
                "epoch": info['epoch'] or None,
                "version": info['version'],
                "metadata_only": False,
                "release": info['release'],
                # not used currently
                # "id": 262555,
                # "size": 0
            })
        builds = []
        return builds, packages

    def _get_module_packages(self, tag_name):
        """
        Get list of builds for module and given module tag name.
        """
        builds = []
        packages = []
        modules = self._get_modules_by_name(tag_name)
        for module in modules:
            if module is None:
                raise ValueError('Module %s is not found' % tag_name)
            path = os.path.join(
                self._modules_dir,
                module.arch,
                tag_name,
            )

            builds.append({
                "build_id": module.build_id,
                "package_name": module.name,
                "nvr": module.nvr,
                "tag_name": module.nvr,
                "version": module.stream,
                "release": module.version,
                "id": module.build_id,
                "name": module.name,
                "volume_name": "DEFAULT",
                # Following fields are currently not
                # used but returned by real koji;
                # left here just for reference
                #
                # "owner_name": "mbox-mbs-backend",
                # "task_id": 195937,
                # "state": 1,
                # "start_time": "2020-12-22 19:20:12.504578",
                # "creation_event_id": 306731,
                # "creation_time": "2020-12-22 19:20:12.504578",
                # "epoch": None,
                # "tag_id": 1192,
                # "completion_time": "2020-12-22 19:34:34.716615",
                # "volume_id": 0,
                # "package_id": 104,
                # "owner_id": 6,
            })

            if os.path.exists(path):
                info = Modulemd.ModuleStream.read_string(open(path).read(), strict=True)
                for art in info.get_rpm_artifacts():
                    data = parse_nvra(art)
                    packages.append({
                        "build_id": module.build_id,
                        "name": data['name'],
                        "extra": None,
                        "arch": data['arch'],
                        "epoch": data['epoch'] or None,
                        "version": data['version'],
                        "metadata_only": False,
                        "release": data['release'],
                        "id": 262555,
                        "size": 0
                    })
            else:
                raise RuntimeError('Unable to find module %s' % path)
        return builds, packages

    def _get_modules_by_name(self, tag_name):
        modules = []
        for arch in self._all_arches:
            for module in self._modules.values():
                if module.nvr != tag_name or module.arch != arch:
                    continue
                modules.append(module)
        return modules
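KojiMock reads everything from a koji-like directory tree, typically prepared by the gather_rpms and gather_modules scripts above; a sketch of the layout it expects (paths and names hypothetical):

    # /mnt/koji/                   <- packages_dir (profile topdir)
    #     i686/  noarch/  x86_64/  <- per-arch RPM dirs from gather_rpms
    #     modules/                 <- modules_dir
    #         x86_64/
    #             mymod-my_stream-20210411.9f9e2e7e  <- modulemd dump per module
    km = KojiMock(
        packages_dir='/mnt/koji',
        modules_dir='/mnt/koji/modules',
        all_arches=['i686', 'noarch', 'x86_64'],
    )
    event = km.getLastEvent()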
@@ -14,18 +14,25 @@
 # along with this program; if not, see <https://gnu.org/licenses/>.


+import contextlib
 import os
 import re
+import socket
+import shutil
 import time
 import threading
-import contextlib
+
+import requests

 import koji
 from kobo.shortcuts import run, force_list
 import six
 from six.moves import configparser, shlex_quote
 import six.moves.xmlrpc_client as xmlrpclib
+from flufl.lock import Lock
+from datetime import timedelta

+from .kojimock import KojiMock
 from .. import util
 from ..arch_utils import getBaseArch
@@ -785,11 +792,10 @@ class KojiWrapper(object):
         if list_of_args is None and list_of_kwargs is None:
             raise ValueError("One of list_of_args or list_of_kwargs must be set.")

-        if type(list_of_args) not in [type(None), list] or type(list_of_kwargs) not in [
-            type(None),
-            list,
-        ]:
-            raise ValueError("list_of_args and list_of_kwargs must be list or None.")
+        if list_of_args is not None and not isinstance(list_of_args, list):
+            raise ValueError("list_of_args must be list or None.")
+        if list_of_kwargs is not None and not isinstance(list_of_kwargs, list):
+            raise ValueError("list_of_kwargs must be list or None.")

         if list_of_kwargs is None:
             list_of_kwargs = [{}] * len(list_of_args)
@@ -803,9 +809,9 @@ class KojiWrapper(object):

         koji_session.multicall = True
         for args, kwargs in zip(list_of_args, list_of_kwargs):
-            if type(args) != list:
+            if not isinstance(args, list):
                 args = [args]
-            if type(kwargs) != dict:
+            if not isinstance(kwargs, dict):
                 raise ValueError("Every item in list_of_kwargs must be a dict")
             koji_session_fnc(*args, **kwargs)
@@ -813,7 +819,7 @@ class KojiWrapper(object):

         if not responses:
             return None
-        if type(responses) != list:
+        if not isinstance(responses, list):
             raise ValueError(
                 "Fault element was returned for multicall of method %r: %r"
                 % (koji_session_fnc, responses)
@@ -829,7 +835,7 @@ class KojiWrapper(object):
         # a one-item array containing the result value,
         # or a struct of the form found inside the standard <fault> element.
         for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):
-            if type(response) == list:
+            if isinstance(response, list):
                 if not response:
                     raise ValueError(
                         "Empty list returned for multicall of method %r with args %r, %r"  # noqa: E501
@@ -864,6 +870,45 @@ class KojiWrapper(object):
         pass


+class KojiMockWrapper(object):
+    lock = threading.Lock()
+
+    def __init__(self, compose, all_arches):
+        self.all_arches = all_arches
+        self.compose = compose
+        try:
+            self.profile = self.compose.conf["koji_profile"]
+        except KeyError:
+            raise RuntimeError("Koji profile must be configured")
+        with self.lock:
+            self.koji_module = koji.get_profile_module(self.profile)
+            session_opts = {}
+            for key in (
+                "timeout",
+                "keepalive",
+                "max_retries",
+                "retry_interval",
+                "anon_retry",
+                "offline_retry",
+                "offline_retry_interval",
+                "debug",
+                "debug_xmlrpc",
+                "serverca",
+                "use_fast_upload",
+            ):
+                value = getattr(self.koji_module.config, key, None)
+                if value is not None:
+                    session_opts[key] = value
+            self.koji_proxy = KojiMock(
+                packages_dir=self.koji_module.config.topdir,
+                modules_dir=os.path.join(
+                    self.koji_module.config.topdir,
+                    'modules',
+                ),
+                all_arches=self.all_arches,
+            )
+
+
 def get_buildroot_rpms(compose, task_id):
     """Get build root RPMs - either from runroot or local"""
     result = []
@@ -895,3 +940,176 @@ def get_buildroot_rpms(compose, task_id):
             continue
         result.append(i)
     return sorted(result)
+
+
+class KojiDownloadProxy:
+    def __init__(self, topdir, topurl, cache_dir, logger):
+        if not topdir:
+            # This will only happen if there is either no koji_profile
+            # configured, or the profile doesn't have a topdir. In the first
+            # case there will be no koji interaction, and the second indicates
+            # broken koji configuration.
+            # We can pretend to have local access in both cases to avoid any
+            # external requests.
+            self.has_local_access = True
+            return
+
+        self.cache_dir = cache_dir
+        self.logger = logger
+
+        self.topdir = topdir
+        self.topurl = topurl
+
+        # If cache directory is configured, we want to use it (even if we
+        # actually have local access to the storage).
+        self.has_local_access = not bool(cache_dir)
+        # This is used for temporary downloaded files. The suffix is unique
+        # per-process. To prevent threads in the same process from colliding, a
+        # thread id is added later.
+        self.unique_suffix = "%s.%s" % (socket.gethostname(), os.getpid())
+        self.session = None
+        if not self.has_local_access:
+            self.session = requests.Session()
+
+    @property
+    def path_prefix(self):
+        dir = self.topdir if self.has_local_access else self.cache_dir
+        return dir.rstrip("/") + "/"
+
+    @classmethod
+    def from_config(klass, conf, logger):
+        topdir = None
+        topurl = None
+        cache_dir = None
+        if "koji_profile" in conf:
+            koji_module = koji.get_profile_module(conf["koji_profile"])
+            topdir = koji_module.config.topdir
+            topurl = koji_module.config.topurl
+
+            cache_dir = conf.get("koji_cache")
+            if cache_dir:
+                cache_dir = cache_dir.rstrip("/") + "/"
+        return klass(topdir, topurl, cache_dir, logger)
+
+    @util.retry(wait_on=requests.exceptions.RequestException)
+    def _download(self, url, dest):
+        """Download file into given location
+
+        :param str url: URL of the file to download
+        :param str dest: file path to store the result in
+        :returns: path to the downloaded file (same as dest) or None if the URL
+            returned 404.
+        """
+        with self.session.get(url, stream=True) as r:
+            if r.status_code == 404:
+                self.logger.warning("GET %s NOT FOUND", url)
+                return None
+            if r.status_code != 200:
+                self.logger.error("GET %s %s", url, r.status_code)
+                r.raise_for_status()
+            # The exception from here will be retried by the decorator.
+
+            file_size = int(r.headers.get("Content-Length", 0))
+            self.logger.info("GET %s OK %s", url, util.format_size(file_size))
+            with open(dest, "wb") as f:
+                shutil.copyfileobj(r.raw, f)
+            return dest
+
+    def _delete(self, path):
+        """Try to delete file at given path and ignore errors."""
+        try:
+            os.remove(path)
+        except Exception:
+            self.logger.warning("Failed to delete %s", path)
+
+    def _atomic_download(self, url, dest, validator):
+        """Atomically download a file
+
+        :param str url: URL of the file to download
+        :param str dest: file path to store the result in
+        :returns: path to the downloaded file (same as dest) or None if the URL
+            returned 404.
+        """
+        temp_file = "%s.%s.%s" % (dest, self.unique_suffix, threading.get_ident())
+
+        # First download to the temporary location.
+        try:
+            if self._download(url, temp_file) is None:
+                # The file was not found.
+                return None
+        except Exception:
+            # Download failed, let's make sure to clean up potentially partial
+            # temporary file.
+            self._delete(temp_file)
+            raise
+
+        # Check if the temporary file is correct (assuming we were provided a
+        # validator function).
+        try:
+            if validator:
+                validator(temp_file)
+        except Exception:
+            # Validation failed. Let's delete the problematic file and re-raise
+            # the exception.
+            self._delete(temp_file)
+            raise
+
+        # Atomically move the temporary file into final location
+        os.rename(temp_file, dest)
+        return dest
+
+    def _download_file(self, path, validator):
+        """Ensure file on Koji volume in ``path`` is present in the local
+        cache.
+
+        :returns: path to the local file or None if file is not found
+        """
+        url = path.replace(self.topdir, self.topurl)
+        destination_file = path.replace(self.topdir, self.cache_dir)
+        util.makedirs(os.path.dirname(destination_file))
+
+        lock = Lock(destination_file + ".lock")
+        # Hold the lock for this file for 5 minutes. If another compose needs
+        # the same file but it's not downloaded yet, the process will wait.
+        #
+        # If the download finishes in time, the downloaded file will be used
+        # here.
+        #
+        # If the download takes longer, this process will steal the lock and
+        # start its own download.
+        #
+        # That should not be a problem: the same file will be downloaded and
+        # then replaced atomically on the filesystem. If the original process
+        # managed to hardlink the first file already, that hardlink will be
+        # broken, but that will only result in the same file stored twice.
+        lock.lifetime = timedelta(minutes=5)
+
+        with lock:
+            # Check if the file already exists. If yes, return the path.
+            if os.path.exists(destination_file):
+                # Update mtime of the file. This covers the case of packages in the
+                # tag that are not included in the compose. Updating mtime will
+                # exempt them from cleanup for extra time.
+                os.utime(destination_file)
+                return destination_file
+
+            return self._atomic_download(url, destination_file, validator)
+
+    def get_file(self, path, validator=None):
+        """
+        If path refers to an existing file in Koji, return a valid local path
+        to it. If no such file exists, return None.
+
+        :param validator: A callable that will be called with the path to the
+            downloaded file if and only if the file was actually downloaded.
+            Any exception raised from there will abort the download and be
+            propagated.
+        """
+        if self.has_local_access:
+            # We have koji volume mounted locally. No transformation needed for
+            # the path, just check it exists.
+            if os.path.exists(path):
+                return path
+            return None
+        else:
+            # We need to download the file.
+            return self._download_file(path, validator)
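A minimal usage sketch, assuming a configuration with koji_profile and an optional koji_cache (all values hypothetical):

    import logging

    conf = {"koji_profile": "koji", "koji_cache": "/var/cache/pungi/koji"}
    proxy = KojiDownloadProxy.from_config(conf, logging.getLogger(__name__))
    # With koji_cache set, has_local_access is False and the file is fetched
    # from topurl into the cache, guarded by a per-file flufl lock:
    local = proxy.get_file("/mnt/koji/packages/foo/1.0/1/x86_64/foo-1.0-1.x86_64.rpm")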
@@ -20,6 +20,7 @@ import os
 import shutil
 import glob
 import six
+import threading
 from six.moves import shlex_quote
 from six.moves.urllib.request import urlretrieve
 from fnmatch import fnmatch
|
@ -29,12 +30,15 @@ from kobo.shortcuts import run, force_list
|
||||||
from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
|
from pungi.util import explode_rpm_package, makedirs, copy_all, temp_dir, retry
|
||||||
from .kojiwrapper import KojiWrapper
|
from .kojiwrapper import KojiWrapper
|
||||||
|
|
||||||
|
lock = threading.Lock()
|
||||||
|
|
||||||
|
|
||||||
class ScmBase(kobo.log.LoggingBase):
|
class ScmBase(kobo.log.LoggingBase):
|
||||||
def __init__(self, logger=None, command=None, compose=None):
|
def __init__(self, logger=None, command=None, compose=None, options=None):
|
||||||
kobo.log.LoggingBase.__init__(self, logger=logger)
|
kobo.log.LoggingBase.__init__(self, logger=logger)
|
||||||
self.command = command
|
self.command = command
|
||||||
self.compose = compose
|
self.compose = compose
|
||||||
|
self.options = options or {}
|
||||||
|
|
||||||
@retry(interval=60, timeout=300, wait_on=RuntimeError)
|
@retry(interval=60, timeout=300, wait_on=RuntimeError)
|
||||||
def retry_run(self, cmd, **kwargs):
|
def retry_run(self, cmd, **kwargs):
|
||||||
|
@ -156,22 +160,31 @@ class GitWrapper(ScmBase):
|
||||||
if "://" not in repo:
|
if "://" not in repo:
|
||||||
repo = "file://%s" % repo
|
repo = "file://%s" % repo
|
||||||
|
|
||||||
|
git_cmd = ["git"]
|
||||||
|
if "credential_helper" in self.options:
|
||||||
|
git_cmd.extend(["-c", "credential.useHttpPath=true"])
|
||||||
|
git_cmd.extend(
|
||||||
|
["-c", "credential.helper=%s" % self.options["credential_helper"]]
|
||||||
|
)
|
||||||
|
|
||||||
run(["git", "init"], workdir=destdir)
|
run(["git", "init"], workdir=destdir)
|
||||||
try:
|
try:
|
||||||
run(["git", "fetch", "--depth=1", repo, branch], workdir=destdir)
|
run(git_cmd + ["fetch", "--depth=1", repo, branch], workdir=destdir)
|
||||||
run(["git", "checkout", "FETCH_HEAD"], workdir=destdir)
|
run(["git", "checkout", "FETCH_HEAD"], workdir=destdir)
|
||||||
except RuntimeError as e:
|
except RuntimeError as e:
|
||||||
# Fetch failed, to do a full clone we add a remote to our empty
|
# Fetch failed, to do a full clone we add a remote to our empty
|
||||||
# repo, get its content and check out the reference we want.
|
# repo, get its content and check out the reference we want.
|
||||||
self.log_debug(
|
self.log_debug(
|
||||||
"Trying to do a full clone because shallow clone failed: %s %s"
|
"Trying to do a full clone because shallow clone failed: %s %s"
|
||||||
% (e, e.output)
|
% (e, getattr(e, "output", ""))
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
# Re-run git init in case of previous failure breaking .git dir
|
# Re-run git init in case of previous failure breaking .git dir
|
||||||
run(["git", "init"], workdir=destdir)
|
run(["git", "init"], workdir=destdir)
|
||||||
run(["git", "remote", "add", "origin", repo], workdir=destdir)
|
run(["git", "remote", "add", "origin", repo], workdir=destdir)
|
||||||
self.retry_run(["git", "remote", "update", "origin"], workdir=destdir)
|
self.retry_run(
|
||||||
|
git_cmd + ["remote", "update", "origin"], workdir=destdir
|
||||||
|
)
|
||||||
run(["git", "checkout", branch], workdir=destdir)
|
run(["git", "checkout", branch], workdir=destdir)
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
if self.compose:
|
if self.compose:
|
||||||
|
@@ -185,19 +198,38 @@ class GitWrapper(ScmBase):
                     copy_all(destdir, debugdir)
                 raise

-        self.run_process_command(destdir)
+    def get_temp_repo_path(self, scm_root, scm_branch):
+        scm_repo = scm_root.split("/")[-1]
+        process_id = os.getpid()
+        tmp_dir = (
+            "/tmp/pungi-temp-git-repos-"
+            + str(process_id)
+            + "/"
+            + scm_repo
+            + "-"
+            + scm_branch
+        )
+        return tmp_dir
+
+    def setup_repo(self, scm_root, scm_branch):
+        tmp_dir = self.get_temp_repo_path(scm_root, scm_branch)
+        if not os.path.isdir(tmp_dir):
+            makedirs(tmp_dir)
+            self._clone(scm_root, scm_branch, tmp_dir)
+            self.run_process_command(tmp_dir)
+        return tmp_dir

     def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
         scm_dir = scm_dir.lstrip("/")
         scm_branch = scm_branch or "master"

-        with temp_dir() as tmp_dir:
-            self.log_debug(
-                "Exporting directory %s from git %s (branch %s)..."
-                % (scm_dir, scm_root, scm_branch)
-            )
+        self.log_debug(
+            "Exporting directory %s from git %s (branch %s)..."
+            % (scm_dir, scm_root, scm_branch)
+        )

-            self._clone(scm_root, scm_branch, tmp_dir)
+        with lock:
+            tmp_dir = self.setup_repo(scm_root, scm_branch)

-            copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
+        copy_all(os.path.join(tmp_dir, scm_dir), target_dir)
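The cached checkout path is derived only from the repo basename, the branch, and the current pid, so repeated exports within one process reuse the clone; for example:

    wrapper = GitWrapper()
    # With os.getpid() == 4242 (illustrative) this returns:
    #   /tmp/pungi-temp-git-repos-4242/bash.git-f36
    tmp = wrapper.get_temp_repo_path("https://example.com/rpms/bash.git", "f36")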
@@ -205,15 +237,15 @@ class GitWrapper(ScmBase):
         scm_file = scm_file.lstrip("/")
         scm_branch = scm_branch or "master"

-        with temp_dir() as tmp_dir:
-            target_path = os.path.join(target_dir, os.path.basename(scm_file))
+        target_path = os.path.join(target_dir, os.path.basename(scm_file))

-            self.log_debug(
-                "Exporting file %s from git %s (branch %s)..."
-                % (scm_file, scm_root, scm_branch)
-            )
+        self.log_debug(
+            "Exporting file %s from git %s (branch %s)..."
+            % (scm_file, scm_root, scm_branch)
+        )

-            self._clone(scm_root, scm_branch, tmp_dir)
+        with lock:
+            tmp_dir = self.setup_repo(scm_root, scm_branch)

         makedirs(target_dir)
         shutil.copy2(os.path.join(tmp_dir, scm_file), target_path)
|
@ -361,15 +393,19 @@ def get_file_from_scm(scm_dict, target_path, compose=None):
|
||||||
scm_file = os.path.abspath(scm_dict)
|
scm_file = os.path.abspath(scm_dict)
|
||||||
scm_branch = None
|
scm_branch = None
|
||||||
command = None
|
command = None
|
||||||
|
options = {}
|
||||||
else:
|
else:
|
||||||
scm_type = scm_dict["scm"]
|
scm_type = scm_dict["scm"]
|
||||||
scm_repo = scm_dict["repo"]
|
scm_repo = scm_dict["repo"]
|
||||||
scm_file = scm_dict["file"]
|
scm_file = scm_dict["file"]
|
||||||
scm_branch = scm_dict.get("branch", None)
|
scm_branch = scm_dict.get("branch", None)
|
||||||
command = scm_dict.get("command")
|
command = scm_dict.get("command")
|
||||||
|
options = scm_dict.get("options", {})
|
||||||
|
|
||||||
logger = compose._logger if compose else None
|
logger = compose._logger if compose else None
|
||||||
scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)
|
scm = _get_wrapper(
|
||||||
|
scm_type, logger=logger, command=command, compose=compose, options=options
|
||||||
|
)
|
||||||
|
|
||||||
files_copied = []
|
files_copied = []
|
||||||
for i in force_list(scm_file):
|
for i in force_list(scm_file):
|
||||||
|
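With these changes an scm_dict in the configuration can carry git options through to the wrapper; a sketch with a hypothetical credential helper:

    scm_dict = {
        "scm": "git",
        "repo": "https://example.com/extra-files.git",
        "file": "GPL",
        "branch": "master",
        "options": {"credential_helper": "!/usr/libexec/pungi-git-helper"},
    }
    get_file_from_scm(scm_dict, "/tmp/extra-files", compose=None)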
@@ -450,15 +486,19 @@ def get_dir_from_scm(scm_dict, target_path, compose=None):
         scm_dir = os.path.abspath(scm_dict)
         scm_branch = None
         command = None
+        options = {}
     else:
         scm_type = scm_dict["scm"]
         scm_repo = scm_dict.get("repo", None)
         scm_dir = scm_dict["dir"]
         scm_branch = scm_dict.get("branch", None)
         command = scm_dict.get("command")
+        options = scm_dict.get("options", {})

     logger = compose._logger if compose else None
-    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)
+    scm = _get_wrapper(
+        scm_type, logger=logger, command=command, compose=compose, options=options
+    )

     with temp_dir(prefix="scm_checkout_") as tmp_dir:
         scm.export_dir(scm_repo, scm_dir, scm_branch=scm_branch, target_dir=tmp_dir)
@@ -276,7 +276,6 @@ class Variant(object):
         modules=None,
         modular_koji_tags=None,
     ):
-
         environments = environments or []
         buildinstallpackages = buildinstallpackages or []

@@ -1,705 +0,0 @@
# -*- coding: utf-8 -*-

from __future__ import print_function

import argparse
import atexit
import errno
import json
import logging
import os
import re
import shutil
import subprocess
import sys
import tempfile
import time
import threading
from collections import namedtuple

import kobo.conf
import kobo.log
import productmd
from kobo import shortcuts
from six.moves import configparser, shlex_quote

import pungi.util
from pungi.compose import get_compose_dir
from pungi.linker import linker_pool
from pungi.phases.pkgset.sources.source_koji import get_koji_event_raw
from pungi.util import find_old_compose, parse_koji_event, temp_dir
from pungi.wrappers.kojiwrapper import KojiWrapper


Config = namedtuple(
    "Config",
    [
        # Path to directory with the compose
        "target",
        "compose_type",
        "label",
        # Path to the selected old compose that will be reused
        "old_compose",
        # Path to directory with config file copies
        "config_dir",
        # Which koji event to use (if any)
        "event",
        # Additional arguments to pungi-koji executable
        "extra_args",
    ],
)

log = logging.getLogger(__name__)


class Status(object):
    # Ready to start
    READY = "READY"
    # Waiting for dependencies to finish.
    WAITING = "WAITING"
    # Part is currently running
    STARTED = "STARTED"
    # A dependency failed, this one will never start.
    BLOCKED = "BLOCKED"


class ComposePart(object):
    def __init__(self, name, config, just_phase=[], skip_phase=[], dependencies=[]):
        self.name = name
        self.config = config
        self.status = Status.WAITING if dependencies else Status.READY
        self.just_phase = just_phase
        self.skip_phase = skip_phase
        self.blocked_on = set(dependencies)
        self.depends_on = set(dependencies)
        self.path = None
        self.log_file = None
        self.failable = False

    def __str__(self):
        return self.name

    def __repr__(self):
        return (
            "ComposePart({0.name!r},"
            " {0.config!r},"
            " {0.status!r},"
            " just_phase={0.just_phase!r},"
            " skip_phase={0.skip_phase!r},"
            " dependencies={0.depends_on!r})"
        ).format(self)

    def refresh_status(self):
        """Refresh status of this part with the result of the compose. This
        should only be called once the compose finished.
        """
        try:
            with open(os.path.join(self.path, "STATUS")) as fh:
                self.status = fh.read().strip()
        except IOError as exc:
            log.error("Failed to update status of %s: %s", self.name, exc)
            log.error("Assuming %s is DOOMED", self.name)
            self.status = "DOOMED"

    def is_finished(self):
        return "FINISHED" in self.status

    def unblock_on(self, finished_part):
        """Update set of blockers for this part. If it's empty, mark us as ready."""
        self.blocked_on.discard(finished_part)
        if self.status == Status.WAITING and not self.blocked_on:
            log.debug("%s is ready to start", self)
            self.status = Status.READY

    def setup_start(self, global_config, parts):
        substitutions = dict(
            ("part-%s" % name, p.path) for name, p in parts.items() if p.is_finished()
        )
        substitutions["configdir"] = global_config.config_dir

        config = pungi.util.load_config(self.config)

        for f in config.opened_files:
            # apply substitutions
            fill_in_config_file(f, substitutions)

        self.status = Status.STARTED
        self.path = get_compose_dir(
            os.path.join(global_config.target, "parts"),
            config,
            compose_type=global_config.compose_type,
            compose_label=global_config.label,
        )
        self.log_file = os.path.join(global_config.target, "logs", "%s.log" % self.name)
        log.info("Starting %s in %s", self.name, self.path)

    def get_cmd(self, global_config):
        cmd = ["pungi-koji", "--config", self.config, "--compose-dir", self.path]
        cmd.append("--%s" % global_config.compose_type)
        if global_config.label:
            cmd.extend(["--label", global_config.label])
        for phase in self.just_phase:
            cmd.extend(["--just-phase", phase])
        for phase in self.skip_phase:
            cmd.extend(["--skip-phase", phase])
        if global_config.old_compose:
            cmd.extend(
                ["--old-compose", os.path.join(global_config.old_compose, "parts")]
            )
        if global_config.event:
            cmd.extend(["--koji-event", str(global_config.event)])
        if global_config.extra_args:
            cmd.extend(global_config.extra_args)
        cmd.extend(["--no-latest-link"])
        return cmd

    @classmethod
    def from_config(cls, config, section, config_dir):
        part = cls(
            name=section,
            config=os.path.join(config_dir, config.get(section, "config")),
            just_phase=_safe_get_list(config, section, "just_phase", []),
            skip_phase=_safe_get_list(config, section, "skip_phase", []),
            dependencies=_safe_get_list(config, section, "depends_on", []),
        )
        if config.has_option(section, "failable"):
            part.failable = config.getboolean(section, "failable")
        return part


def _safe_get_list(config, section, option, default=None):
    """Get a value from config parser. The result is split into a list on
    commas or spaces, and `default` is returned if the key does not exist.
    """
    if config.has_option(section, option):
        value = config.get(section, option)
        return [x.strip() for x in re.split(r"[, ]+", value) if x]
    return default


def fill_in_config_file(fp, substs):
    """Templating function. It works with Jinja2 style placeholders such as
    {{foo}}. Whitespace around the key name is fine. The file is modified in place.

    :param fp string: path to the file to process
    :param substs dict: a mapping for values to put into the file
    """

    def repl(match):
        try:
            return substs[match.group(1)]
        except KeyError as exc:
            raise RuntimeError(
                "Unknown placeholder %s in %s" % (exc, os.path.basename(fp))
            )

    with open(fp, "r") as f:
        contents = re.sub(r"{{ *([a-zA-Z-_]+) *}}", repl, f.read())
    with open(fp, "w") as f:
        f.write(contents)


def start_part(global_config, parts, part):
    part.setup_start(global_config, parts)
    fh = open(part.log_file, "w")
    cmd = part.get_cmd(global_config)
    log.debug("Running command %r", " ".join(shlex_quote(x) for x in cmd))
    return subprocess.Popen(cmd, stdout=fh, stderr=subprocess.STDOUT)


def handle_finished(global_config, linker, parts, proc, finished_part):
    finished_part.refresh_status()
    log.info("%s finished with status %s", finished_part, finished_part.status)
    if proc.returncode == 0:
        # Success, unblock other parts...
        for part in parts.values():
            part.unblock_on(finished_part.name)
        # ...and link the results into final destination.
        copy_part(global_config, linker, finished_part)
        update_metadata(global_config, finished_part)
    else:
        # Failure, other stuff may be blocked.
        log.info("See details in %s", finished_part.log_file)
        block_on(parts, finished_part.name)


def copy_part(global_config, linker, part):
    c = productmd.Compose(part.path)
    for variant in c.info.variants:
        data_path = os.path.join(part.path, "compose", variant)
        link = os.path.join(global_config.target, "compose", variant)
        log.info("Hardlinking content %s -> %s", data_path, link)
        hardlink_dir(linker, data_path, link)


def hardlink_dir(linker, srcdir, dstdir):
    for root, dirs, files in os.walk(srcdir):
        root = os.path.relpath(root, srcdir)
        for f in files:
            src = os.path.normpath(os.path.join(srcdir, root, f))
            dst = os.path.normpath(os.path.join(dstdir, root, f))
            linker.queue_put((src, dst))


def update_metadata(global_config, part):
    part_metadata_dir = os.path.join(part.path, "compose", "metadata")
    final_metadata_dir = os.path.join(global_config.target, "compose", "metadata")
    for f in os.listdir(part_metadata_dir):
        # Load the metadata
        with open(os.path.join(part_metadata_dir, f)) as fh:
            part_metadata = json.load(fh)
        final_metadata = os.path.join(final_metadata_dir, f)
        if os.path.exists(final_metadata):
            # We already have this file, will need to merge.
            merge_metadata(final_metadata, part_metadata)
        else:
            # A new file, just copy it.
            copy_metadata(global_config, final_metadata, part_metadata)


def copy_metadata(global_config, final_metadata, source):
    """Copy file to final location, but update compose information."""
    with open(
        os.path.join(global_config.target, "compose/metadata/composeinfo.json")
    ) as f:
        composeinfo = json.load(f)
    try:
        source["payload"]["compose"].update(composeinfo["payload"]["compose"])
    except KeyError:
        # No [payload][compose], probably OSBS metadata
        pass
    with open(final_metadata, "w") as f:
        json.dump(source, f, indent=2, sort_keys=True)


def merge_metadata(final_metadata, source):
    with open(final_metadata) as f:
        metadata = json.load(f)

    try:
        key = {
            "productmd.composeinfo": "variants",
            "productmd.modules": "modules",
            "productmd.images": "images",
            "productmd.rpms": "rpms",
        }[source["header"]["type"]]
        # TODO what if multiple parts create images for the same variant
        metadata["payload"][key].update(source["payload"][key])
    except KeyError:
        # OSBS metadata, merge whole file
        metadata.update(source)
    with open(final_metadata, "w") as f:
        json.dump(metadata, f, indent=2, sort_keys=True)


def block_on(parts, name):
    """Part ``name`` failed, mark everything depending on it as blocked."""
    for part in parts.values():
        if name in part.blocked_on:
            log.warning("%s is blocked now and will not run", part)
            part.status = Status.BLOCKED
            block_on(parts, part.name)


def check_finished_processes(processes):
    """Walk through all active processes and check if something finished."""
    for proc in processes.keys():
        proc.poll()
        if proc.returncode is not None:
            yield proc, processes[proc]


def run_all(global_config, parts):
    # Mapping subprocess.Popen -> ComposePart
    processes = dict()
    remaining = set(p.name for p in parts.values() if not p.is_finished())

    with linker_pool("hardlink") as linker:
        while remaining or processes:
            update_status(global_config, parts)

            for proc, part in check_finished_processes(processes):
                del processes[proc]
                handle_finished(global_config, linker, parts, proc, part)

            # Start new available processes.
            for name in list(remaining):
                part = parts[name]
                # Start all ready parts
                if part.status == Status.READY:
                    remaining.remove(name)
                    processes[start_part(global_config, parts, part)] = part
                # Remove blocked parts from todo list
                elif part.status == Status.BLOCKED:
                    remaining.remove(part.name)

            # Wait for any child process to finish if there is any.
            if processes:
                pid, reason = os.wait()
                for proc in processes.keys():
                    # Set the return code for process that we caught by os.wait().
                    # Calling poll() on it would not set the return code properly
                    # since the value was already consumed by os.wait().
                    if proc.pid == pid:
                        proc.returncode = (reason >> 8) & 0xFF

        log.info("Waiting for linking to finish...")
    return update_status(global_config, parts)


def get_target_dir(config, compose_info, label, reldir=""):
    """Find directory where this compose will be.

    @param reldir: if target path in config is relative, it will be resolved
                   against this directory
    """
    dir = os.path.realpath(os.path.join(reldir, config.get("general", "target")))
    target_dir = get_compose_dir(
        dir,
        compose_info,
        compose_type=config.get("general", "compose_type"),
        compose_label=label,
    )
    return target_dir


def setup_logging(debug=False):
    FORMAT = "%(asctime)s: %(levelname)s: %(message)s"
    level = logging.DEBUG if debug else logging.INFO
    kobo.log.add_stderr_logger(log, log_level=level, format=FORMAT)
    log.setLevel(level)


def compute_status(statuses):
    if any(map(lambda x: x[0] in ("STARTED", "WAITING"), statuses)):
        # If there is anything still running or waiting to start, the whole is
        # still running.
        return "STARTED"
    elif any(map(lambda x: x[0] in ("DOOMED", "BLOCKED") and not x[1], statuses)):
        # If any required part is doomed or blocked, the whole is doomed
        return "DOOMED"
    elif all(map(lambda x: x[0] == "FINISHED", statuses)):
        # If all parts are complete, the whole is complete
        return "FINISHED"
    else:
        return "FINISHED_INCOMPLETE"


def update_status(global_config, parts):
    log.debug("Updating status metadata")
    metadata = {}
    statuses = set()
    for part in parts.values():
        metadata[part.name] = {"status": part.status, "path": part.path}
        statuses.add((part.status, part.failable))
    metadata_path = os.path.join(
        global_config.target, "compose", "metadata", "parts.json"
    )
    with open(metadata_path, "w") as fh:
        json.dump(metadata, fh, indent=2, sort_keys=True, separators=(",", ": "))

    status = compute_status(statuses)
    log.info("Overall status is %s", status)
    with open(os.path.join(global_config.target, "STATUS"), "w") as fh:
        fh.write(status)

    return status != "DOOMED"


def prepare_compose_dir(config, args, main_config_file, compose_info):
    if not hasattr(args, "compose_path"):
        # Creating a brand new compose
        target_dir = get_target_dir(
            config, compose_info, args.label, reldir=os.path.dirname(main_config_file)
        )
        for dir in ("logs", "parts", "compose/metadata", "work/global"):
            try:
                os.makedirs(os.path.join(target_dir, dir))
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise
        with open(os.path.join(target_dir, "STATUS"), "w") as fh:
            fh.write("STARTED")
        # Copy initial composeinfo for new compose
        shutil.copy(
            os.path.join(target_dir, "work/global/composeinfo-base.json"),
            os.path.join(target_dir, "compose/metadata/composeinfo.json"),
        )
    else:
        # Restarting a particular compose
        target_dir = args.compose_path

    return target_dir


def load_parts_metadata(global_config):
    parts_metadata = os.path.join(global_config.target, "compose/metadata/parts.json")
    with open(parts_metadata) as f:
        return json.load(f)


def setup_for_restart(global_config, parts, to_restart):
    has_stuff_to_do = False
    metadata = load_parts_metadata(global_config)
    for key in metadata:
        # Update state to match what is on disk
        log.debug(
            "Reusing %s (%s) from %s",
            key,
            metadata[key]["status"],
            metadata[key]["path"],
        )
        parts[key].status = metadata[key]["status"]
        parts[key].path = metadata[key]["path"]
    for key in to_restart:
        # Set restarted parts to run again
        parts[key].status = Status.WAITING
        parts[key].path = None

    for key in to_restart:
        # Remove blockers that are already finished
        for blocker in list(parts[key].blocked_on):
            if parts[blocker].is_finished():
                parts[key].blocked_on.discard(blocker)
        if not parts[key].blocked_on:
            log.debug("Part %s in not blocked", key)
            # Nothing blocks it; let's go
            parts[key].status = Status.READY
            has_stuff_to_do = True

    if not has_stuff_to_do:
        raise RuntimeError("All restarted parts are blocked. Nothing to do.")


def run_kinit(config):
    if not config.getboolean("general", "kerberos"):
        return

    keytab = config.get("general", "kerberos_keytab")
    principal = config.get("general", "kerberos_principal")

    fd, fname = tempfile.mkstemp(prefix="krb5cc_pungi-orchestrate_")
    os.close(fd)
    os.environ["KRB5CCNAME"] = fname
    shortcuts.run(["kinit", "-k", "-t", keytab, principal])
    log.debug("Created a kerberos ticket for %s", principal)

    atexit.register(os.remove, fname)


def get_compose_data(compose_path):
    try:
        compose = productmd.compose.Compose(compose_path)
        data = {
            "compose_id": compose.info.compose.id,
            "compose_date": compose.info.compose.date,
            "compose_type": compose.info.compose.type,
            "compose_respin": str(compose.info.compose.respin),
            "compose_label": compose.info.compose.label,
            "release_id": compose.info.release_id,
            "release_name": compose.info.release.name,
            "release_short": compose.info.release.short,
            "release_version": compose.info.release.version,
            "release_type": compose.info.release.type,
            "release_is_layered": compose.info.release.is_layered,
        }
        if compose.info.release.is_layered:
            data.update(
                {
                    "base_product_name": compose.info.base_product.name,
                    "base_product_short": compose.info.base_product.short,
                    "base_product_version": compose.info.base_product.version,
                    "base_product_type": compose.info.base_product.type,
                }
            )
        return data
    except Exception:
        return {}


def get_script_env(compose_path):
    env = os.environ.copy()
    env["COMPOSE_PATH"] = compose_path
    for key, value in get_compose_data(compose_path).items():
        if isinstance(value, bool):
            env[key.upper()] = "YES" if value else ""
        else:
            env[key.upper()] = str(value) if value else ""
    return env


def run_scripts(prefix, compose_dir, scripts):
    env = get_script_env(compose_dir)
    for idx, script in enumerate(scripts.strip().splitlines()):
        command = script.strip()
        logfile = os.path.join(compose_dir, "logs", "%s%s.log" % (prefix, idx))
        log.debug("Running command: %r", command)
        log.debug("See output in %s", logfile)
        shortcuts.run(command, env=env, logfile=logfile)


def try_translate_path(parts, path):
    translation = []
    for part in parts.values():
        conf = pungi.util.load_config(part.config)
        translation.extend(conf.get("translate_paths", []))
    return pungi.util.translate_path_raw(translation, path)


def send_notification(compose_dir, command, parts):
    if not command:
        return
    from pungi.notifier import PungiNotifier

    data = get_compose_data(compose_dir)
    data["location"] = try_translate_path(parts, compose_dir)
    notifier = PungiNotifier([command])
    with open(os.path.join(compose_dir, "STATUS")) as f:
        status = f.read().strip()
    notifier.send("status-change", workdir=compose_dir, status=status, **data)


def setup_progress_monitor(global_config, parts):
    """Update configuration so that each part send notifications about its
    progress to the orchestrator.

    There is a file to which the notification is written. The orchestrator is
    reading it and mapping the entries to particular parts. The path to this
    file is stored in an environment variable.
    """
    tmp_file = tempfile.NamedTemporaryFile(prefix="pungi-progress-monitor_")
    os.environ["_PUNGI_ORCHESTRATOR_PROGRESS_MONITOR"] = tmp_file.name
    atexit.register(os.remove, tmp_file.name)

    global_config.extra_args.append(
        "--notification-script=pungi-notification-report-progress"
    )

    def reader():
        while True:
            line = tmp_file.readline()
            if not line:
                time.sleep(0.1)
                continue
            path, msg = line.split(":", 1)
            for part in parts:
                if parts[part].path == os.path.dirname(path):
                    log.debug("%s: %s", part, msg.strip())
                    break

    monitor = threading.Thread(target=reader)
    monitor.daemon = True
    monitor.start()


def run(work_dir, main_config_file, args):
    config_dir = os.path.join(work_dir, "config")
    shutil.copytree(os.path.dirname(main_config_file), config_dir)

    # Read main config
    parser = configparser.RawConfigParser(
        defaults={
            "kerberos": "false",
            "pre_compose_script": "",
            "post_compose_script": "",
            "notification_script": "",
        }
    )
    parser.read(main_config_file)

    # Create kerberos ticket
    run_kinit(parser)

    compose_info = dict(parser.items("general"))
    compose_type = parser.get("general", "compose_type")

    target_dir = prepare_compose_dir(parser, args, main_config_file, compose_info)
    kobo.log.add_file_logger(log, os.path.join(target_dir, "logs", "orchestrator.log"))
    log.info("Composing %s", target_dir)

    run_scripts("pre_compose_", target_dir, parser.get("general", "pre_compose_script"))

    old_compose = find_old_compose(
        os.path.dirname(target_dir),
        compose_info["release_short"],
        compose_info["release_version"],
        "",
    )
    if old_compose:
        log.info("Reusing old compose %s", old_compose)

    global_config = Config(
        target=target_dir,
        compose_type=compose_type,
        label=args.label,
        old_compose=old_compose,
        config_dir=os.path.dirname(main_config_file),
        event=args.koji_event,
        extra_args=_safe_get_list(parser, "general", "extra_args"),
    )

    if not global_config.event and parser.has_option("general", "koji_profile"):
        koji_wrapper = KojiWrapper(parser.get("general", "koji_profile"))
        event_file = os.path.join(global_config.target, "work/global/koji-event")
        result = get_koji_event_raw(koji_wrapper, None, event_file)
        global_config = global_config._replace(event=result["id"])

    parts = {}
    for section in parser.sections():
        if section == "general":
            continue
        parts[section] = ComposePart.from_config(parser, section, config_dir)

    if hasattr(args, "part"):
        setup_for_restart(global_config, parts, args.part)

    setup_progress_monitor(global_config, parts)

    send_notification(target_dir, parser.get("general", "notification_script"), parts)

    retcode = run_all(global_config, parts)

    if retcode:
        # Only run the script if we are not doomed.
        run_scripts(
            "post_compose_", target_dir, parser.get("general", "post_compose_script")
        )

    send_notification(target_dir, parser.get("general", "notification_script"), parts)

    return retcode


def parse_args(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--koji-event", metavar="ID", type=parse_koji_event)
    subparsers = parser.add_subparsers()
    start = subparsers.add_parser("start")
    start.add_argument("config", metavar="CONFIG")
    start.add_argument("--label")

    restart = subparsers.add_parser("restart")
    restart.add_argument("config", metavar="CONFIG")
    restart.add_argument("compose_path", metavar="COMPOSE_PATH")
    restart.add_argument(
        "part", metavar="PART", nargs="*", help="which parts to restart"
    )
    restart.add_argument("--label")

    return parser.parse_args(argv)


def main(argv=None):
    args = parse_args(argv)
    setup_logging(args.debug)

    main_config_file = os.path.abspath(args.config)

    with temp_dir() as work_dir:
        try:
            if not run(work_dir, main_config_file, args):
                sys.exit(1)
        except Exception:
            log.exception("Unhandled exception!")
            sys.exit(1)
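The file removed above implemented pungi-orchestrate, which ran compose parts as subprocesses, unblocked dependent parts as each one finished, and filled {{key}} placeholders into the per-part config copies. A standalone sketch of that placeholder substitution, mirroring the removed fill_in_config_file:

import re

def fill_in(text, substs):
    # Mirrors the removed fill_in_config_file(): replaces {{ key }} style
    # placeholders and raises on keys that have no substitution.
    def repl(match):
        try:
            return substs[match.group(1)]
        except KeyError as exc:
            raise RuntimeError("Unknown placeholder %s" % exc)
    return re.sub(r"{{ *([a-zA-Z-_]+) *}}", repl, text)

print(fill_in("repo = {{ part-Server }}/compose", {"part-Server": "/mnt/parts/Server"}))
# -> repo = /mnt/parts/Server/compose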
@@ -1,7 +1,8 @@
 # Some packages must be installed via dnf/yum first, see doc/contributing.rst
 dict.sorted
 dogpile.cache
-fedmsg
+flufl.lock ; python_version >= '3.0'
+flufl.lock < 3.0 ; python_version <= '2.7'
 funcsigs
 jsonschema
 kobo
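The flufl.lock lines use PEP 508 environment markers, so pip selects the right version per interpreter at install time. A small sketch of how such a marker evaluates, assuming the packaging library is available:

# Sketch: evaluating an environment marker like the one added above.
from packaging.markers import Marker

marker = Marker("python_version >= '3.0'")
print(marker.evaluate())  # True on Python 3, so plain flufl.lock is installed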
setup.py

@@ -5,14 +5,9 @@
 import os
 import glob

-import distutils.command.sdist
 from setuptools import setup


-# override default tarball format with bzip2
-distutils.command.sdist.sdist.default_format = {"posix": "bztar"}
-
-
 # recursively scan for python modules to be included
 package_root_dirs = ["pungi", "pungi_utils"]
 packages = set()

@@ -25,7 +20,7 @@ packages = sorted(packages)

 setup(
     name="pungi",
-    version="4.3.6",
+    version="4.5.0",
     description="Distribution compose tool",
     url="https://pagure.io/pungi",
     author="Dennis Gilmore",

@@ -41,16 +36,21 @@ setup(
             "pungi-patch-iso = pungi.scripts.patch_iso:cli_main",
             "pungi-make-ostree = pungi.ostree:main",
             "pungi-notification-report-progress = pungi.scripts.report_progress:main",
-            "pungi-orchestrate = pungi_utils.orchestrator:main",
             "pungi-wait-for-signed-ostree-handler = pungi.scripts.wait_for_signed_ostree_handler:main",  # noqa: E501
             "pungi-koji = pungi.scripts.pungi_koji:cli_main",
             "pungi-gather = pungi.scripts.pungi_gather:cli_main",
             "pungi-config-dump = pungi.scripts.config_dump:cli_main",
             "pungi-config-validate = pungi.scripts.config_validate:cli_main",
+            "pungi-cache-cleanup = pungi.scripts.cache_cleanup:main",
+            "pungi-gather-modules = pungi.scripts.gather_modules:cli_main",
+            "pungi-gather-rpms = pungi.scripts.gather_rpms:cli_main",
+            "pungi-generate-packages-json = pungi.scripts.create_packages_json:cli_main",  # noqa: E501
+            "pungi-create-extra-repo = pungi.scripts.create_extra_repo:cli_main"
         ]
     },
     scripts=["contrib/yum-dnf-compare/pungi-compare-depsolving"],
     data_files=[
+        ("/usr/lib/tmpfiles.d", glob.glob("contrib/tmpfiles.d/*.conf")),
         ("/usr/share/pungi", glob.glob("share/*.xsl")),
         ("/usr/share/pungi", glob.glob("share/*.ks")),
         ("/usr/share/pungi", glob.glob("share/*.dtd")),

@@ -66,5 +66,5 @@ setup(
         "dogpile.cache",
     ],
     extras_require={':python_version=="2.7"': ["enum34", "lockfile"]},
-    tests_require=["mock", "pytest", "pytest-cov"],
+    tests_require=["mock", "pytest", "pytest-cov", "pyfakefs"],
 )
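Each console_scripts entry above maps a command name to a module:function pair, and pip generates a launcher for it at install time. Roughly, for the new pungi-cache-cleanup entry (a sketch of the mechanism, not the generated file verbatim):

import sys

def launcher():
    # What "pungi-cache-cleanup = pungi.scripts.cache_cleanup:main" resolves
    # to: import the module, call the named function, exit with its result.
    from pungi.scripts.cache_cleanup import main
    sys.exit(main())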
@@ -108,6 +108,7 @@
       <groupid>core</groupid>
     </grouplist>
     <optionlist>
+      <groupid arch="x86_64">standard</groupid>
     </optionlist>
   </environment>

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>1612479076</revision>
  <data type="primary">
    <checksum type="sha256">08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb</checksum>
    <open-checksum type="sha256">2a15e618f049a883d360ccbf3e764b30640255f47dc526c633b1722fe23cbcbc</open-checksum>
    <location href="repodata/08941fae6bdb14f3b22bfad38b9d7dcb685a9df58fe8f515a3a0b2fe1af903bb-primary.xml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>1240</size>
    <open-size>3888</open-size>
  </data>
  <data type="filelists">
    <checksum type="sha256">e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f</checksum>
    <open-checksum type="sha256">b1782bc4207a5b7c3e64115d5a1d001802e8d363f022ea165df7cdab6f14651c</open-checksum>
    <location href="repodata/e37a0b4a63b2b245dca1727195300cd3961f80aebc82ae7b9849dbf7482f5d0f-filelists.xml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>439</size>
    <open-size>1295</open-size>
  </data>
  <data type="other">
    <checksum type="sha256">92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1</checksum>
    <open-checksum type="sha256">3b847919691ad32279b13463de6c08f1f8b32f51e87b7d8d7e95a3ec2f46ef51</open-checksum>
    <location href="repodata/92992176bce71dcde9e4b6ad1442e7b5c7f3de9b7f019a2cd27d042ab38ea2b1-other.xml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>630</size>
    <open-size>1911</open-size>
  </data>
  <data type="modules">
    <checksum type="sha256">e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57</checksum>
    <open-checksum type="sha256">d59fee86c18018cc18bb7325aa74aa0abf923c64d29a4ec45e08dcd01a0c3966</open-checksum>
    <location href="repodata/e7a671401f8e207e4cd3b90b4ac92d621f84a34dc9026f57c3f427fbed444c57-modules.yaml.gz"/>
    <timestamp>1612479075</timestamp>
    <size>920</size>
    <open-size>3308</open-size>
  </data>
</repomd>

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
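The repomd.xml fixtures record a SHA-256 checksum and location for each repodata payload. A minimal sketch of how a consumer could verify one payload against its recorded checksum (the helper name and paths are illustrative):

import hashlib

def verify(payload_path, expected_sha256):
    # Stream the payload and compare its SHA-256 digest against the value
    # recorded in the <checksum> element of repomd.xml.
    h = hashlib.sha256()
    with open(payload_path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            h.update(chunk)
    return h.hexdigest() == expected_sha256

# verify("repodata/08941fae...-primary.xml.gz", "08941fae6bdb14f3...")  # -> True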
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>1666177486</revision>
  <data type="primary">
    <checksum type="sha256">89cb9cc1181635c9147864a7076d91fb81072641d481cd202832a2d257453576</checksum>
    <open-checksum type="sha256">07255d9856f7531b52a6459f6fc7701c6d93c6d6c29d1382d83afcc53f13494a</open-checksum>
    <location href="repodata/89cb9cc1181635c9147864a7076d91fb81072641d481cd202832a2d257453576-primary.xml.gz"/>
    <timestamp>1666177486</timestamp>
    <size>1387</size>
    <open-size>6528</open-size>
  </data>
  <data type="filelists">
    <checksum type="sha256">f69ca03957574729fd5150335b0d87afddcfb37a97aed5b06272212854f1773d</checksum>
    <open-checksum type="sha256">c2e1e674d7d48bccaa16cae0a5f70cb55ef4cd7352b4d9d4fdaa619075d07dbc</open-checksum>
    <location href="repodata/f69ca03957574729fd5150335b0d87afddcfb37a97aed5b06272212854f1773d-filelists.xml.gz"/>
    <timestamp>1666177486</timestamp>
    <size>1252</size>
    <open-size>5594</open-size>
  </data>
  <data type="other">
    <checksum type="sha256">b3827bd6c9ea67ffa3912002515c64e4d9fe5c4dacbf7c46b0d8768b7abbb84f</checksum>
    <open-checksum type="sha256">9ce24c526239e349d023c577b2ae3872c8b0f1888aed1fb24b9b9aa12063fdf3</open-checksum>
    <location href="repodata/b3827bd6c9ea67ffa3912002515c64e4d9fe5c4dacbf7c46b0d8768b7abbb84f-other.xml.gz"/>
    <timestamp>1666177486</timestamp>
    <size>999</size>
    <open-size>6320</open-size>
  </data>
  <data type="primary_db">
    <checksum type="sha256">ab8df35061dfa0285069b843f24a7076e31266d9a8abe8282340bcb936aa61d7</checksum>
    <open-checksum type="sha256">2bce9554ce4496cef34b5cd69f186f7f3143c7cabae8fa384fc5c9eeab326f7f</open-checksum>
    <location href="repodata/ab8df35061dfa0285069b843f24a7076e31266d9a8abe8282340bcb936aa61d7-primary.sqlite.bz2"/>
    <timestamp>1666177486</timestamp>
    <size>3558</size>
    <open-size>106496</open-size>
    <database_version>10</database_version>
  </data>
  <data type="filelists_db">
    <checksum type="sha256">8bcf6d40db4e922934ac47e8ac7fb8d15bdacf579af8c819d2134ed54d30550b</checksum>
    <open-checksum type="sha256">f7001d1df7f5f7e4898919b15710bea8ed9711ce42faf68e22b757e63169b1fb</open-checksum>
    <location href="repodata/8bcf6d40db4e922934ac47e8ac7fb8d15bdacf579af8c819d2134ed54d30550b-filelists.sqlite.bz2"/>
    <timestamp>1666177486</timestamp>
    <size>2360</size>
    <open-size>28672</open-size>
    <database_version>10</database_version>
  </data>
  <data type="other_db">
    <checksum type="sha256">01b82e9eb7ee9151f283c6e761ae450de18ed2d64b5e32de88689eaf95216a80</checksum>
    <open-checksum type="sha256">07f5b9750af1e440d37ca216e719dd288149e79e9132f2fdccb6f73b2e5dd541</open-checksum>
    <location href="repodata/01b82e9eb7ee9151f283c6e761ae450de18ed2d64b5e32de88689eaf95216a80-other.sqlite.bz2"/>
    <timestamp>1666177486</timestamp>
    <size>2196</size>
    <open-size>32768</open-size>
    <database_version>10</database_version>
  </data>
</repomd>

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.

@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>1666177500</revision>
  <data type="primary">
    <checksum type="sha256">a1d342aa7cef3a2034fc3f9d6ee02d63572780bc76e61749a57e50b6b3ca9869</checksum>
    <open-checksum type="sha256">a9e3eae447dd44282d7d96db5f15f049b757925397adb752f4df982176bab7e0</open-checksum>
    <location href="repodata/a1d342aa7cef3a2034fc3f9d6ee02d63572780bc76e61749a57e50b6b3ca9869-primary.xml.gz"/>
    <timestamp>1666177500</timestamp>
    <size>3501</size>
    <open-size>37296</open-size>
  </data>
  <data type="filelists">
    <checksum type="sha256">6778922d5853d20f213ae7702699a76f1e87e55d6bfb5e4ac6a117d904d47b3c</checksum>
    <open-checksum type="sha256">e30b666d9d88a70de69a08f45e6696bcd600c45485d856bd0213395d7da7bd49</open-checksum>
    <location href="repodata/6778922d5853d20f213ae7702699a76f1e87e55d6bfb5e4ac6a117d904d47b3c-filelists.xml.gz"/>
    <timestamp>1666177500</timestamp>
    <size>27624</size>
    <open-size>318187</open-size>
  </data>
  <data type="other">
    <checksum type="sha256">5a60d79d8bce6a805f4fdb22fd891524359dce8ccc665c0b54e7299e79debe84</checksum>
    <open-checksum type="sha256">b18138f4a3de45714e578fb1f30b7ec54fdcdaf1a22585891625b6af0894388e</open-checksum>
    <location href="repodata/5a60d79d8bce6a805f4fdb22fd891524359dce8ccc665c0b54e7299e79debe84-other.xml.gz"/>
    <timestamp>1666177500</timestamp>
    <size>1876</size>
    <open-size>28701</open-size>
  </data>
  <data type="primary_db">
    <checksum type="sha256">c27bc2ce947173aba305041552c3c6d8db71442c1a2e5dcaf35ff750fe0469fc</checksum>
    <open-checksum type="sha256">586e1af8934229925adb9e746ae5ced119859dfd97f4e3237399bb36a7d7f071</open-checksum>
    <location href="repodata/c27bc2ce947173aba305041552c3c6d8db71442c1a2e5dcaf35ff750fe0469fc-primary.sqlite.bz2"/>
    <timestamp>1666177500</timestamp>
    <size>11528</size>
    <open-size>126976</open-size>
    <database_version>10</database_version>
  </data>
  <data type="filelists_db">
    <checksum type="sha256">ed350865982e7a1e45b144839b56eac888e5d8f680571dd2cd06b37dc83e0fd8</checksum>
    <open-checksum type="sha256">697903989d0f77de2d44a2b603e75c9b4ca23b3795eb136d175caf5666ce6459</open-checksum>
    <location href="repodata/ed350865982e7a1e45b144839b56eac888e5d8f680571dd2cd06b37dc83e0fd8-filelists.sqlite.bz2"/>
    <timestamp>1666177500</timestamp>
    <size>20440</size>
    <open-size>163840</open-size>
    <database_version>10</database_version>
  </data>
  <data type="other_db">
    <checksum type="sha256">35eff699131e0976429144c6f4514d21568177dc64bb4091c3ff62f76b293725</checksum>
    <open-checksum type="sha256">3bd999a1bdf300df836a4607b7b75f845d8e1432e3e4e1ab6f0c7cc8a853db39</open-checksum>
    <location href="repodata/35eff699131e0976429144c6f4514d21568177dc64bb4091c3ff62f76b293725-other.sqlite.bz2"/>
    <timestamp>1666177500</timestamp>
    <size>4471</size>
    <open-size>49152</open-size>
    <database_version>10</database_version>
  </data>
</repomd>
@@ -0,0 +1,58 @@
[checksums]
images/boot.iso = sha256:fc8a4be604b6425746f12fa706116eb940f93358f036b8fbbe518b516cb6870c

[general]
; WARNING.0 = This section provides compatibility with pre-productmd treeinfos.
; WARNING.1 = Read productmd documentation for details about new format.
arch = x86_64
family = Test
name = Test 1.0
packagedir = Packages
platforms = x86_64,xen
repository = .
timestamp = 1531881582
variant = Server
variants = Client,Server
version = 1.0

[header]
type = productmd.treeinfo
version = 1.2

[images-x86_64]
boot.iso = images/boot.iso

[images-xen]
initrd = images/pxeboot/initrd.img
kernel = images/pxeboot/vmlinuz

[release]
name = Test
short = T
version = 1.0

[stage2]
mainimage = images/install.img

[tree]
arch = x86_64
build_timestamp = 1531881582
platforms = x86_64,xen
variants = Client,Server

[variant-Client]
id = Client
name = Client
packages = ../../../Client/x86_64/os/Packages
repository = ../../../Client/x86_64/os
type = variant
uid = Client

[variant-Server]
id = Server
name = Server
packages = Packages
repository = .
type = variant
uid = Server
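The .treeinfo fixture above is flat key = value INI, so it can be read with the standard library; productmd provides a richer TreeInfo loader. A sketch using plain configparser (the file path is assumed):

import configparser

cp = configparser.ConfigParser()
cp.read(".treeinfo")  # assumed to be the fixture shown above
print(cp.get("general", "variants"))           # Client,Server
print(cp.get("checksums", "images/boot.iso"))  # sha256:fc8a4be6...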
@@ -0,0 +1,20 @@
---
document: modulemd
version: 2
data:
  name: module
  stream: master
  version: 20190318
  context: abcdef
  arch: x86_64
  summary: Dummy module
  description: Dummy module
  license:
    module:
    - Beerware
    content:
    - Beerware
  artifacts:
    rpms:
    - foobar-0:1.0-1.noarch
...

@@ -0,0 +1,20 @@
---
document: modulemd
version: 2
data:
  name: module
  stream: master
  version: 20190318
  context: abcdef
  arch: x86_64
  summary: Dummy module
  description: Dummy module
  license:
    module:
    - Beerware
    content:
    - Beerware
  artifacts:
    rpms:
    - foobar-0:1.0-1.noarch
...

@@ -0,0 +1,20 @@
---
document: modulemd
version: 2
data:
  name: scratch-module
  stream: master
  version: 20200710
  context: abcdef
  arch: x86_64
  summary: Dummy module
  description: Dummy module
  license:
    module:
    - Beerware
    content:
    - Beerware
  artifacts:
    rpms:
    - foobar-0:1.0-1.noarch
...

@@ -0,0 +1,20 @@
---
document: modulemd
version: 2
data:
  name: scratch-module
  stream: master
  version: 20200710
  context: abcdef
  arch: x86_64
  summary: Dummy module
  description: Dummy module
  license:
    module:
    - Beerware
    content:
    - Beerware
  artifacts:
    rpms:
    - foobar-0:1.0-1.noarch
...
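The modulemd fixtures above are plain YAML documents, so the module's NSVCA identity can be read straight from the data mapping. A sketch (fixture path assumed):

import yaml

with open("module.yaml") as f:  # assumed path to one of the fixtures
    doc = yaml.safe_load(f)

d = doc["data"]
nsvca = "%s:%s:%s:%s:%s" % (d["name"], d["stream"], d["version"], d["context"], d["arch"])
print(nsvca)                    # module:master:20190318:abcdef:x86_64
print(d["artifacts"]["rpms"])   # ['foobar-0:1.0-1.noarch']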
@@ -7,7 +7,7 @@ import shutil
 import tempfile
 from collections import defaultdict

-import mock
+from unittest import mock
 import six
 from kobo.rpmlib import parse_nvr

@@ -21,6 +21,15 @@ from pungi import paths, checks
 from pungi.module_util import Modulemd


+GIT_WITH_CREDS = [
+    "git",
+    "-c",
+    "credential.useHttpPath=true",
+    "-c",
+    "credential.helper=!ch",
+]
+
+
 class BaseTestCase(unittest.TestCase):
     def assertFilesEqual(self, fn1, fn2):
         with open(fn1, "rb") as f1:

@@ -158,6 +167,20 @@ class IterableMock(mock.Mock):
         return iter([])


+class FSKojiDownloader(object):
+    """Mock for KojiDownloadProxy that checks provided path."""
+
+    def get_file(self, path, validator=None):
+        return path if os.path.isfile(path) else None
+
+
+class DummyKojiDownloader(object):
+    """Mock for KojiDownloadProxy that always finds the file in original location."""
+
+    def get_file(self, path, validator=None):
+        return path
+
+
 class DummyCompose(object):
     def __init__(self, topdir, config):
         self.supported = True

@@ -232,6 +255,8 @@ class DummyCompose(object):
         self.cache_region = None
         self.containers_metadata = {}
         self.load_old_compose_config = mock.Mock(return_value=None)
+        self.koji_downloader = DummyKojiDownloader()
+        self.koji_downloader.path_prefix = "/prefix"

     def setup_optional(self):
         self.all_variants["Server-optional"] = MockVariant(

@@ -272,7 +297,7 @@ class DummyCompose(object):
         return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=self.topdir)


-def touch(path, content=None):
+def touch(path, content=None, mode=None):
     """Helper utility that creates an dummy file in given location. Directories
     will be created."""
     content = content or (path + "\n")

@@ -284,6 +309,8 @@ def touch(path, content=None):
         content = content.encode()
     with open(path, "wb") as f:
         f.write(content)
+    if mode:
+        os.chmod(path, mode)
     return path
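With the new mode argument, touch can create a fixture file and set its permissions in one call. A sketch of the extended helper in use, assuming the tests package is importable:

from tests.helpers import touch  # assumed import path for the test suite

path = touch("/tmp/fixture/run.sh", "#!/bin/sh\n", mode=0o755)
# equivalent to touch(path, content) followed by os.chmod(path, 0o755)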
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-

-import mock
+from unittest import mock
 import unittest

 from pungi.arch import (

@@ -1,4 +1,4 @@
-import mock
+from unittest import mock

 try:
     import unittest2 as unittest
@@ -1,15 +1,15 @@
 # -*- coding: utf-8 -*-

 try:
     import unittest2 as unittest
 except ImportError:
     import unittest

-import mock
+from unittest import mock
 import six
 from copy import copy
 from six.moves import StringIO
+from ddt import ddt, data

 import os

@@ -1209,6 +1209,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
             },
         )

@@ -1308,6 +1309,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "lorax_use_koji_plugin": True,
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
             },
         )

@@ -1412,6 +1414,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "buildinstall",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -1500,6 +1503,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "buildinstall",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )

@@ -1542,6 +1546,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )

@@ -1591,6 +1596,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )

@@ -1663,6 +1669,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.+$", {"*": ["buildinstall"]})],
             },
         )

@@ -1701,6 +1708,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "runroot_weights": {"buildinstall": 123},
                 "buildinstall_topdir": "/buildinstall_topdir",
             },

@@ -1810,6 +1818,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "rrt",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )

@@ -2005,6 +2014,7 @@ class BuildinstallThreadTestCase(PungiTestCase):
         self.assertEqual(ret, None)


+@ddt
 class TestSymlinkIso(PungiTestCase):
     def setUp(self):
         super(TestSymlinkIso, self).setUp()

@@ -2020,8 +2030,13 @@ class TestSymlinkIso(PungiTestCase):
     @mock.patch("pungi.phases.buildinstall.get_file_size")
     @mock.patch("pungi.phases.buildinstall.iso")
     @mock.patch("pungi.phases.buildinstall.run")
-    def test_hardlink(self, run, iso, get_file_size, get_mtime, ImageCls):
-        self.compose.conf = {"buildinstall_symlink": False, "disc_types": {}}
+    @data(['Server'], ['BaseOS'])
+    def test_hardlink(self, netinstall_variants, run, iso, get_file_size, get_mtime, ImageCls):
+        self.compose.conf = {
+            "buildinstall_symlink": False,
+            "disc_types": {},
+            "netinstall_variants": netinstall_variants,
+        }
         get_file_size.return_value = 1024
         get_mtime.return_value = 13579

@@ -2071,9 +2086,14 @@ class TestSymlinkIso(PungiTestCase):
         self.assertEqual(image.bootable, True)
         self.assertEqual(image.implant_md5, iso.get_implanted_md5.return_value)
         self.assertEqual(image.can_fail, False)
-        self.assertEqual(
-            self.compose.im.add.mock_calls, [mock.call("Server", "x86_64", image)]
-        )
+        if 'Server' in netinstall_variants:
+            self.assertEqual(
+                self.compose.im.add.mock_calls, [mock.call("Server", "x86_64", image)]
+            )
+        else:
+            self.assertEqual(
+                self.compose.im.add.mock_calls, []
+            )

     @mock.patch("pungi.phases.buildinstall.Image")
     @mock.patch("pungi.phases.buildinstall.get_mtime")

@@ -2086,6 +2106,7 @@ class TestSymlinkIso(PungiTestCase):
         self.compose.conf = {
             "buildinstall_symlink": False,
             "disc_types": {"boot": "netinst"},
+            "netinstall_variants": ['Server'],
         }
         get_file_size.return_value = 1024
         get_mtime.return_value = 13579
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-

-import mock
+from unittest import mock

 try:
     import unittest2 as unittest

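Several files in this diff make the same change: the third-party mock backport is replaced with the standard-library unittest.mock, available since Python 3.3. For code that still needs to import on old interpreters, the usual compatibility shim is this generic pattern (not taken from this repository):

try:
    from unittest import mock  # standard library on Python 3.3+
except ImportError:  # pragma: no cover
    import mock  # PyPI backport, only needed on Python 2
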
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-

 import logging
-import mock
+from unittest import mock

 try:
     import unittest2 as unittest

@@ -628,6 +628,7 @@ class ComposeTestCase(unittest.TestCase):
         ci_copy = dict(self.ci_json)
         ci_copy["header"]["version"] = "1.2"
         mocked_response = mock.MagicMock()
+        mocked_response.status_code = 200
         mocked_response.text = json.dumps(self.ci_json)
         mocked_requests.post.return_value = mocked_response


@@ -655,6 +656,7 @@ class ComposeTestCase(unittest.TestCase):
         mocked_requests.post.assert_called_once_with(
             "https://cts.localhost.tld/api/1/composes/",
             auth=mock.ANY,
+            data=None,
             json=expected_json,
         )


@@ -793,12 +795,16 @@ class TracebackTest(unittest.TestCase):
         shutil.rmtree(self.tmp_dir)
         self.patcher.stop()

-    def assertTraceback(self, filename):
+    def assertTraceback(self, filename, show_locals=True):
         self.assertTrue(
             os.path.isfile("%s/logs/global/%s.global.log" % (self.tmp_dir, filename))
         )
         self.assertEqual(
-            self.Traceback.mock_calls, [mock.call(), mock.call().get_traceback()]
+            self.Traceback.mock_calls,
+            [
+                mock.call(show_locals=show_locals),
+                mock.call(show_locals=show_locals).get_traceback(),
+            ],
         )

     def test_traceback_default(self):

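The updated assertTraceback helper now expects Traceback to be constructed with a show_locals keyword, defaulting to True. Based only on the mock calls pinned above, the production code presumably does something like the following; Traceback usually comes from kobo.tbtools in Pungi, but treat the import and the call site as assumptions:

# a minimal sketch, assuming kobo.tbtools.Traceback; the test only fixes
# Traceback(show_locals=...) followed by .get_traceback()
from kobo.tbtools import Traceback


def dump_traceback(log_path, show_locals=True):
    tb = Traceback(show_locals=show_locals)
    with open(log_path, "a") as f:
        f.write(tb.get_traceback())
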
@@ -811,6 +817,7 @@ class TracebackTest(unittest.TestCase):


 class RetryRequestTest(unittest.TestCase):
+    @mock.patch("time.sleep", new=lambda x: x)
     @mock.patch("pungi.compose.requests")
     def test_retry_timeout(self, mocked_requests):
         mocked_requests.post.side_effect = [

@@ -822,8 +829,22 @@ class RetryRequestTest(unittest.TestCase):
         self.assertEqual(
             mocked_requests.mock_calls,
             [
-                mock.call.post(url, json=None, auth=None),
-                mock.call.post(url, json=None, auth=None),
+                mock.call.post(url, data=None, json=None, auth=None),
+                mock.call.post(url, data=None, json=None, auth=None),
             ],
         )
         self.assertEqual(rv.status_code, 200)
+
+    @mock.patch("pungi.compose.requests")
+    def test_no_retry_on_client_error(self, mocked_requests):
+        mocked_requests.post.side_effect = [
+            mock.Mock(status_code=400, json=lambda: {"message": "You made a mistake"}),
+        ]
+        url = "http://locahost/api/1/composes/"
+        with self.assertRaises(RuntimeError):
+            retry_request("post", url)
+
+        self.assertEqual(
+            mocked_requests.mock_calls,
+            [mock.call.post(url, data=None, json=None, auth=None)],
+        )

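Read together, the two tests pin down the contract of retry_request: it forwards data, json, and auth to requests, retries (sleeping between attempts, patched to a no-op here) until a successful response arrives, and raises RuntimeError immediately on a 4xx response instead of retrying. A sketch consistent with those assertions; the real implementation in pungi/compose.py may differ in details such as the backoff policy:

# reconstructed from the assertions above, not copied from pungi.compose
import time

import requests


def retry_request(method, url, data=None, json_data=None, auth=None, attempts=4):
    last = None
    for attempt in range(attempts):
        last = getattr(requests, method)(url, data=data, json=json_data, auth=auth)
        if 400 <= last.status_code < 500:
            # client errors are final: raise instead of retrying
            raise RuntimeError(last.json()["message"])
        if last.status_code < 400:
            return last
        time.sleep(attempt)  # assumed backoff; time.sleep is mocked out in the tests
    return last
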
@@ -7,7 +7,7 @@ except ImportError:
     import unittest

 import six
-import mock
+from unittest import mock

 from pungi import checks
 from tests.helpers import load_config, PKGSET_REPOS

@@ -440,7 +440,7 @@ class LiveMediaConfigTestCase(ConfigTestCase):
             live_media_version="Rawhide",
         )

-        resolve_git_url.side_effect = lambda x: x.replace("HEAD", "CAFE")
+        resolve_git_url.side_effect = lambda x, _helper: x.replace("HEAD", "CAFE")

         self.assertValidation(cfg)
         self.assertEqual(cfg["live_media_ksurl"], "git://example.com/repo.git#CAFE")

@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-


-import mock
+from unittest import mock
 import os
 import six


@@ -0,0 +1,228 @@
+# coding=utf-8
+
+import os
+from unittest import TestCase, mock, main
+
+import yaml
+
+from pungi.scripts.create_extra_repo import CreateExtraRepo, ExtraVariantInfo, RepoInfo
+
+FOLDER_WITH_TEST_DATA = os.path.join(
+    os.path.dirname(
+        os.path.abspath(__file__)
+    ),
+    'data/test_create_extra_repo/',
+)
+
+TEST_MODULE_INFO = yaml.load("""
+---
+document: modulemd
+version: 2
+data:
+  name: perl-App-cpanminus
+  stream: 1.7044
+  version: 8030020210126085450
+  context: 3a33b840
+  arch: x86_64
+  summary: Get, unpack, build and install CPAN modules
+  description: >
+    This is a CPAN client that requires zero configuration, and stands alone but it's
+    maintainable and extensible with plug-ins and friendly to shell scripting.
+  license:
+    module:
+    - MIT
+    content:
+    - (GPL+ or Artistic) and GPLv2+
+    - ASL 2.0
+    - GPL+ or Artistic
+  dependencies:
+  - buildrequires:
+      perl: [5.30]
+      platform: [el8.3.0]
+    requires:
+      perl: [5.30]
+      perl-YAML: []
+      platform: [el8]
+  references:
+    community: https://metacpan.org/release/App-cpanminus
+  profiles:
+    common:
+      description: App-cpanminus distribution
+      rpms:
+      - perl-App-cpanminus
+  api:
+    rpms:
+    - perl-App-cpanminus
+  filter:
+    rpms:
+    - perl-CPAN-DistnameInfo-dummy
+    - perl-Test-Deep
+  buildopts:
+    rpms:
+      macros: >
+        %_without_perl_CPAN_Meta_Check_enables_extra_test 1
+  components:
+    rpms:
+      perl-App-cpanminus:
+        rationale: The API.
+        ref: perl-App-cpanminus-1.7044-5.module+el8.2.0+4278+abcfa81a.src.rpm
+        buildorder: 1
+        arches: [i686, x86_64]
+      perl-CPAN-DistnameInfo:
+        rationale: Run-time dependency.
+        ref: stream-0.12-rhel-8.3.0
+        arches: [i686, x86_64]
+      perl-CPAN-Meta-Check:
+        rationale: Run-time dependency.
+        ref: perl-CPAN-Meta-Check-0.014-6.module+el8.2.0+4278+abcfa81a.src.rpm
+        buildorder: 1
+        arches: [i686, x86_64]
+      perl-File-pushd:
+        rationale: Run-time dependency.
+        ref: perl-File-pushd-1.014-6.module+el8.2.0+4278+abcfa81a.src.rpm
+        arches: [i686, x86_64]
+      perl-Module-CPANfile:
+        rationale: Run-time dependency.
+        ref: perl-Module-CPANfile-1.1002-7.module+el8.2.0+4278+abcfa81a.src.rpm
+        arches: [i686, x86_64]
+      perl-Parse-PMFile:
+        rationale: Run-time dependency.
+        ref: perl-Parse-PMFile-0.41-7.module+el8.2.0+4278+abcfa81a.src.rpm
+        arches: [i686, x86_64]
+      perl-String-ShellQuote:
+        rationale: Run-time dependency.
+        ref: perl-String-ShellQuote-1.04-24.module+el8.2.0+4278+abcfa81a.src.rpm
+        arches: [i686, x86_64]
+      perl-Test-Deep:
+        rationale: Build-time dependency.
+        ref: stream-1.127-rhel-8.3.0
+        arches: [i686, x86_64]
+  artifacts:
+    rpms:
+    - perl-App-cpanminus-0:1.7044-5.module_el8.3.0+2027+c8990d1d.noarch
+    - perl-App-cpanminus-0:1.7044-5.module_el8.3.0+2027+c8990d1d.src
+    - perl-CPAN-Meta-Check-0:0.014-6.module_el8.3.0+2027+c8990d1d.noarch
+    - perl-CPAN-Meta-Check-0:0.014-6.module_el8.3.0+2027+c8990d1d.src
+    - perl-File-pushd-0:1.014-6.module_el8.3.0+2027+c8990d1d.noarch
+    - perl-File-pushd-0:1.014-6.module_el8.3.0+2027+c8990d1d.src
+    - perl-Module-CPANfile-0:1.1002-7.module_el8.3.0+2027+c8990d1d.noarch
+    - perl-Module-CPANfile-0:1.1002-7.module_el8.3.0+2027+c8990d1d.src
+    - perl-Parse-PMFile-0:0.41-7.module_el8.3.0+2027+c8990d1d.noarch
+    - perl-Parse-PMFile-0:0.41-7.module_el8.3.0+2027+c8990d1d.src
+    - perl-String-ShellQuote-0:1.04-24.module_el8.3.0+2027+c8990d1d.noarch
+    - perl-String-ShellQuote-0:1.04-24.module_el8.3.0+2027+c8990d1d.src
+...
+""", Loader=yaml.BaseLoader)
+
+TEST_REPO_INFO = RepoInfo(
+    path=FOLDER_WITH_TEST_DATA,
+    folder='test_repo',
+    is_remote=False,
+)
+TEST_VARIANT_INFO = ExtraVariantInfo(
+    name='TestRepo',
+    arch='x86_64',
+    packages=[],
+    modules=[],
+    repos=[TEST_REPO_INFO]
+)
+
+BS_BUILD_INFO = {
+    'build_platforms': [
+        {
+            'architectures': ['non_fake_arch', 'fake_arch'],
+            'name': 'fake_platform'
+        }
+    ]
+}
+
+
+class TestCreteExtraRepo(TestCase):
+
+    maxDiff = None
+
+    def test_01_get_repo_info_from_bs_repo(self):
+        auth_token = 'fake_auth_token'
+        build_id = 'fake_build_id'
+        arch = 'fake_arch'
+        packages = ['fake_package1', 'fake_package2']
+        modules = ['fake_module1', 'fake_module2']
+
+        request_object = mock.Mock()
+        request_object.raise_for_status = lambda: True
+        request_object.json = lambda: BS_BUILD_INFO
+        with mock.patch(
+            'pungi.scripts.create_extra_repo.requests.get',
+            return_value=request_object,
+        ) as mock_request_get:
+            repos_info = CreateExtraRepo.get_repo_info_from_bs_repo(
+                auth_token=auth_token,
+                build_id=build_id,
+                arch=arch,
+                packages=packages,
+                modules=modules,
+            )
+            self.assertEqual(
+                [
+                    ExtraVariantInfo(
+                        name=f'{build_id}-fake_platform-{arch}',
+                        arch=arch,
+                        packages=packages,
+                        modules=modules,
+                        repos=[
+                            RepoInfo(
+                                path='https://build.cloudlinux.com/'
+                                     f'build_repos/{build_id}/fake_platform',
+                                folder=arch,
+                                is_remote=True,
+                            )
+                        ]
+                    )
+                ],
+                repos_info,
+            )
+            mock_request_get.assert_called_once_with(
+                url=f'https://build.cloudlinux.com/api/v1/builds/{build_id}',
+                headers={
+                    'Authorization': f'Bearer {auth_token}',
+                }
+            )
+
+    def test_02_create_extra_repo(self):
+        with mock.patch(
+            'pungi.scripts.create_extra_repo.'
+            'CreateExtraRepo._read_local_modules_yaml',
+            return_value=[],
+        ) as mock__read_local_modules_yaml, mock.patch(
+            'pungi.scripts.create_extra_repo.'
+            'CreateExtraRepo._download_rpm_to_local_repo',
+        ) as mock__download_rpm_to_local_repo, mock.patch(
+            'pungi.scripts.create_extra_repo.'
+            'CreateExtraRepo._dump_local_modules_yaml'
+        ) as mock__dump_local_modules_yaml, mock.patch(
+            'pungi.scripts.create_extra_repo.'
+            'CreateExtraRepo._create_local_extra_repo'
+        ) as mock__create_local_extra_repo:
+            cer = CreateExtraRepo(
+                variants=[TEST_VARIANT_INFO],
+                bs_auth_token='fake_auth_token',
+                local_repository_path='/path/to/local/repo',
+                clear_target_repo=False,
+            )
+            mock__read_local_modules_yaml.assert_called_once_with()
+            cer.create_extra_repo()
+            mock__download_rpm_to_local_repo.assert_called_once_with(
+                package_location='perl-App-cpanminus-1.7044-5.'
+                                 'module_el8.3.0+2027+c8990d1d.noarch.rpm',
+                repo_info=TEST_REPO_INFO,
+            )
+            mock__dump_local_modules_yaml.assert_called_once_with()
+            mock__create_local_extra_repo.assert_called_once_with()
+            self.assertEqual(
+                [TEST_MODULE_INFO],
+                cer.local_modules_data,
+            )
+
+
+if __name__ == '__main__':
+    main()

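One detail worth noticing in the new test module: TEST_MODULE_INFO is parsed with yaml.BaseLoader, which keeps every scalar as a plain string instead of coercing ints, floats, and booleans. That matters for modulemd, where a stream like 1.7044 must stay '1.7044' rather than become a float. A tiny illustration:

import yaml

doc = "stream: 1.7044\nversion: 8030020210126085450\n"
print(yaml.load(doc, Loader=yaml.BaseLoader))  # {'stream': '1.7044', 'version': '8030020210126085450'}
print(yaml.safe_load(doc))                     # {'stream': 1.7044, 'version': 8030020210126085450}
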
@@ -0,0 +1,112 @@
+# coding=utf-8
+
+import os
+from collections import defaultdict
+from unittest import TestCase, mock, main
+
+from pungi.scripts.create_packages_json import (
+    PackagesGenerator,
+    RepoInfo,
+    VariantInfo,
+)
+
+FOLDER_WITH_TEST_DATA = os.path.join(
+    os.path.dirname(
+        os.path.abspath(__file__)
+    ),
+    'data/test_create_packages_json/',
+)
+
+test_repo_info = RepoInfo(
+    path=FOLDER_WITH_TEST_DATA,
+    folder='test_repo',
+    is_remote=False,
+    is_reference=True,
+)
+
+test_repo_info_2 = RepoInfo(
+    path=FOLDER_WITH_TEST_DATA,
+    folder='test_repo_2',
+    is_remote=False,
+    is_reference=True,
+)
+variant_info_1 = VariantInfo(
+    name='TestRepo',
+    arch='x86_64',
+    repos=[test_repo_info]
+)
+variant_info_2 = VariantInfo(
+    name='TestRepo2',
+    arch='x86_64',
+    repos=[test_repo_info_2]
+)
+
+
+class TestPackagesJson(TestCase):
+    def test_01_get_remote_file_content(self):
+        """
+        Test the getting of content from a remote file
+        """
+        request_object = mock.Mock()
+        request_object.raise_for_status = lambda: True
+        request_object.content = b'TestContent'
+        with mock.patch(
+            'pungi.scripts.create_packages_json.requests.get',
+            return_value=request_object,
+        ) as mock_requests_get, mock.patch(
+            'pungi.scripts.create_packages_json.tempfile.NamedTemporaryFile',
+        ) as mock_tempfile:
+            mock_tempfile.return_value.__enter__.return_value.name = 'tmpfile'
+            packages_generator = PackagesGenerator(
+                variants=[],
+                excluded_packages=[],
+                included_packages=[],
+            )
+            file_name = packages_generator.get_remote_file_content(
+                file_url='fakeurl')
+            mock_requests_get.assert_called_once_with(url='fakeurl')
+            mock_tempfile.assert_called_once_with(delete=False)
+            mock_tempfile.return_value.__enter__().\
+                write.assert_called_once_with(b'TestContent')
+            self.assertEqual(
+                file_name,
+                'tmpfile',
+            )
+
+    def test_02_generate_additional_packages(self):
+        pg = PackagesGenerator(
+            variants=[
+                variant_info_1,
+                variant_info_2,
+            ],
+            excluded_packages=['zziplib-utils'],
+            included_packages=['vim-file*'],
+        )
+        test_packages = defaultdict(
+            lambda: defaultdict(
+                lambda: defaultdict(
+                    list,
+                )
+            )
+        )
+        test_packages['TestRepo']['x86_64']['zziplib'] = \
+            [
+                'zziplib.i686',
+                'zziplib.x86_64',
+            ]
+        test_packages['TestRepo2']['x86_64']['vim'] = \
+            [
+                'vim-X11.i686',
+                'vim-common.i686',
+                'vim-enhanced.i686',
+                'vim-filesystem.noarch',
+            ]
+        result = pg.generate_packages_json()
+        self.assertEqual(
+            test_packages,
+            result,
+        )
+
+
+if __name__ == '__main__':
+    main()

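The expected value in test_02_generate_additional_packages is a three-level mapping, variant -> arch -> source-package-name -> list of package names, built from nested defaultdicts so that missing keys materialize on first access. The same structure in isolation:

from collections import defaultdict

packages = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
# no KeyError at any level; intermediate dicts are created on demand
packages['TestRepo']['x86_64']['zziplib'].append('zziplib.x86_64')
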
@@ -2,7 +2,7 @@


 import logging
-import mock
+from unittest import mock
 import six

 import os

@@ -552,6 +552,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         cmd = {

@@ -633,6 +634,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "create_jigdo": False,
                 "runroot_weights": {"createiso": 123},
             },

@@ -717,6 +719,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "buildinstall_method": "lorax",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         cmd = {

@@ -807,6 +810,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
             },
         )
         cmd = {

@@ -839,6 +843,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.*$", {"*": "iso"})],
             },
         )

@@ -881,6 +886,7 @@ class CreateisoThreadTest(helpers.PungiTestCase):
                 "release_version": "1.0",
                 "runroot_tag": "f25-build",
                 "koji_profile": "koji",
+                "koji_cache": "/tmp",
                 "failable_deliverables": [("^.*$", {"*": "iso"})],
             },
         )

@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-

-import mock
+from unittest import mock
+from parameterized import parameterized

 import os
 from six.moves import StringIO

@@ -391,3 +392,27 @@ class CreateIsoScriptTest(helpers.PungiTestCase):
                 ),
             ]
         )
+
+    @parameterized.expand(
+        [("644", 0o644), ("664", 0o664), ("666", 0o666), ("2644", 0o2644)]
+    )
+    def test_get_perms_non_executable(self, test_name, mode):
+        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
+        self.assertEqual(createiso._get_perms(path), 0o444)
+
+    @parameterized.expand(
+        [
+            ("544", 0o544),
+            ("554", 0o554),
+            ("555", 0o555),
+            ("744", 0o744),
+            ("755", 0o755),
+            ("774", 0o774),
+            ("775", 0o775),
+            ("777", 0o777),
+            ("2775", 0o2775),
+        ]
+    )
+    def test_get_perms_executable(self, test_name, mode):
+        path = helpers.touch(os.path.join(self.topdir, "f"), mode=mode)
+        self.assertEqual(createiso._get_perms(path), 0o555)

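The two new parameterized tests describe createiso._get_perms almost completely: any file without an execute bit normalizes to 0o444, any file with one normalizes to 0o555, and special bits (the setgid 0o2000 in "2644" and "2775") are discarded. A sketch that satisfies exactly those cases; the real helper may be written differently:

import os
import stat


def _get_perms(path):
    # normalize modes for files going onto an ISO: read-only for everyone,
    # plus execute when any execute bit is set; special bits are dropped
    mode = os.stat(path).st_mode
    if mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
        return 0o555
    return 0o444
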
@@ -8,7 +8,7 @@ except ImportError:
 import glob
 import os

-import mock
+from unittest import mock
 import six

 from pungi.module_util import Modulemd

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-

-import mock
+from unittest import mock
 import os

 from productmd.extra_files import ExtraFiles

@@ -1,8 +1,8 @@
 # -*- coding: utf-8 -*-
-
-import logging
-import mock
+from typing import AnyStr, List
+from unittest import mock
 import six
+import logging

 import os

@@ -614,6 +614,7 @@ class GetExtraFilesTest(helpers.PungiTestCase):
         )


+@mock.patch("pungi.phases.extra_isos.tweak_repo_treeinfo")
 @mock.patch("pungi.phases.extra_isos.tweak_treeinfo")
 @mock.patch("pungi.wrappers.iso.write_graft_points")
 @mock.patch("pungi.wrappers.iso.get_graft_points")

@@ -623,7 +624,7 @@ class GetIsoContentsTest(helpers.PungiTestCase):
         self.compose = helpers.DummyCompose(self.topdir, {})
         self.variant = self.compose.variants["Server"]

-    def test_non_bootable_binary(self, ggp, wgp, tt):
+    def test_non_bootable_binary(self, ggp, wgp, tt, trt):
         gp = {
             "compose/Client/x86_64/os/Packages": {"f/foo.rpm": "/mnt/f/foo.rpm"},
             "compose/Client/x86_64/os/repodata": {

@@ -693,7 +694,15 @@ class GetIsoContentsTest(helpers.PungiTestCase):
             ],
         )

-    def test_inherit_extra_files(self, ggp, wgp, tt):
+        # Check correct call to tweak_repo_treeinfo
+        self._tweak_repo_treeinfo_call_list_checker(
+            trt_mock=trt,
+            main_variant='Server',
+            addon_variants=['Client'],
+            sub_path='x86_64/os',
+        )
+
+    def test_inherit_extra_files(self, ggp, wgp, tt, trt):
         gp = {
             "compose/Client/x86_64/os/Packages": {"f/foo.rpm": "/mnt/f/foo.rpm"},
             "compose/Client/x86_64/os/repodata": {

@@ -767,7 +776,15 @@ class GetIsoContentsTest(helpers.PungiTestCase):
             ],
         )

-    def test_source(self, ggp, wgp, tt):
+        # Check correct call to tweak_repo_treeinfo
+        self._tweak_repo_treeinfo_call_list_checker(
+            trt_mock=trt,
+            main_variant='Server',
+            addon_variants=['Client'],
+            sub_path='x86_64/os',
+        )
+
+    def test_source(self, ggp, wgp, tt, trt):
         gp = {
             "compose/Client/source/tree/Packages": {"f/foo.rpm": "/mnt/f/foo.rpm"},
             "compose/Client/source/tree/repodata": {

@@ -837,7 +854,15 @@ class GetIsoContentsTest(helpers.PungiTestCase):
             ],
         )

-    def test_bootable(self, ggp, wgp, tt):
+        # Check correct call to tweak_repo_treeinfo
+        self._tweak_repo_treeinfo_call_list_checker(
+            trt_mock=trt,
+            main_variant='Server',
+            addon_variants=['Client'],
+            sub_path='source/tree',
+        )
+
+    def test_bootable(self, ggp, wgp, tt, trt):
         self.compose.conf["buildinstall_method"] = "lorax"

         bi_dir = os.path.join(self.topdir, "work/x86_64/buildinstall/Server")

@@ -939,6 +964,42 @@ class GetIsoContentsTest(helpers.PungiTestCase):
             ],
         )

+        # Check correct call to tweak_repo_treeinfo
+        self._tweak_repo_treeinfo_call_list_checker(
+            trt_mock=trt,
+            main_variant='Server',
+            addon_variants=['Client'],
+            sub_path='x86_64/os',
+        )
+
+    def _tweak_repo_treeinfo_call_list_checker(
+            self,
+            trt_mock: mock.Mock,
+            main_variant: AnyStr,
+            addon_variants: List[AnyStr],
+            sub_path: AnyStr) -> None:
+        """
+        Check correct call to tweak_repo_treeinfo
+        """
+        path_to_treeinfo = os.path.join(
+            self.topdir,
+            'compose',
+            main_variant,
+            sub_path,
+            '.treeinfo',
+        )
+        self.assertEqual(
+            trt_mock.call_args_list,
+            [
+                mock.call(
+                    self.compose,
+                    addon_variants,
+                    path_to_treeinfo,
+                    path_to_treeinfo,
+                )
+            ]
+        )
+

 class GetFilenameTest(helpers.PungiTestCase):
     def test_use_original_name(self):

@@ -1016,6 +1077,15 @@ class TweakTreeinfoTest(helpers.PungiTestCase):

         self.assertFilesEqual(output, expected)

+    def test_repo_tweak(self):
+        compose = helpers.DummyCompose(self.topdir, {})
+        input = os.path.join(helpers.FIXTURE_DIR, "extraiso.treeinfo")
+        output = os.path.join(self.topdir, "actual-treeinfo")
+        expected = os.path.join(helpers.FIXTURE_DIR, "extraiso-tweaked-expected.treeinfo")
+        extra_isos.tweak_repo_treeinfo(compose, ["Client"], input, output)
+
+        self.assertFilesEqual(output, expected)
+

 class PrepareMetadataTest(helpers.PungiTestCase):
     @mock.patch("pungi.metadata.create_media_repo")

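tweak_repo_treeinfo is exercised twice here: indirectly through the mocked trt argument, and directly in test_repo_tweak against fixture files. From the asserted signature, tweak_repo_treeinfo(compose, addon_variants, input, output), it evidently rewrites a variant's .treeinfo so the addon variants are listed alongside the main one. Treeinfo files are INI-style (productmd), so a heavily hedged approximation of the operation; the real helper almost certainly goes through productmd's TreeInfo class, not raw configparser:

from configparser import ConfigParser


def tweak_repo_treeinfo_sketch(addon_variants, in_path, out_path):
    # assumption: the variant list lives under [general] as a comma-separated key
    parser = ConfigParser()
    parser.read(in_path)
    variants = parser.get('general', 'variants').split(',')
    parser.set('general', 'variants', ','.join(sorted(set(variants + addon_variants))))
    with open(out_path, 'w') as f:
        parser.write(f)
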
@@ -153,7 +153,10 @@ class TestParseOutput(unittest.TestCase):
         self.assertEqual(modules, set())

     def test_extracts_modules(self):
-        touch(self.file, "module:mod:master:20181003:cafebeef.x86_64@repo-0\n")
+        touch(
+            self.file,
+            "module:mod:master-1:20181003:cafebeef.x86_64@repo-0\n"
+        )
         packages, modules = fus.parse_output(self.file)
         self.assertEqual(packages, set())
-        self.assertEqual(modules, set(["mod:master:20181003:cafebeef"]))
+        self.assertEqual(modules, set(["mod:master_1:20181003:cafebeef"]))

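The updated test documents a subtle convention: fus prints module streams with their original dashes (master-1), while the NSVC strings Pungi uses internally carry underscores (master_1), so parse_output has to translate. A sketch of the relevant branch, reconstructed from the test's input/output pair rather than from the real pungi/wrappers/fus.py:

def parse_output_sketch(lines):
    packages, modules = set(), set()
    for line in lines:
        line = line.strip()
        if line.startswith("module:"):
            # e.g. "module:mod:master-1:20181003:cafebeef.x86_64@repo-0"
            nsvca = line[len("module:"):].rsplit("@", 1)[0]  # drop "@repo-0"
            nsvc, _arch = nsvca.rsplit(".", 1)               # drop ".x86_64"
            name, stream, rest = nsvc.split(":", 2)
            modules.add(":".join([name, stream.replace("-", "_"), rest]))
        else:
            packages.add(line)
    return packages, modules
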
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-

-import mock
+from unittest import mock

 from pungi.phases.gather.methods import method_deps as deps
 from tests import helpers

@@ -2,7 +2,7 @@

 from collections import namedtuple
 import copy
-import mock
+from unittest import mock
 import os

 import six

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-

-import mock
+from unittest import mock
 import os

 import six

@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+import gzip
+import os
+from io import StringIO
+import yaml
+from pungi.scripts.gather_modules import collect_modules, EMPTY_FILE
+import unittest
+from pyfakefs.fake_filesystem_unittest import TestCase
+
+MARIADB_MODULE = yaml.load("""
+---
+document: modulemd
+version: 2
+data:
+  name: mariadb-devel
+  stream: 10.3-1
+  version: 8010020200108182321
+  context: cdc1202b
+  arch: x86_64
+  summary: MariaDB Module
+  description: >-
+    MariaDB is a community developed branch of MySQL.
+  components:
+    rpms:
+      Judy:
+        rationale: MariaDB dependency for OQgraph computation engine
+        ref: a3583b33f939e74a530f2a1dff0552dff2c8ea73
+        buildorder: 4
+        arches: [aarch64, i686, ppc64le, x86_64]
+  artifacts:
+    rpms:
+    - Judy-0:1.0.5-18.module_el8.1.0+217+4d875839.i686
+    - Judy-debuginfo-0:1.0.5-18.module_el8.1.0+217+4d875839.i686
+""", Loader=yaml.BaseLoader)
+
+JAVAPACKAGES_TOOLS_MODULE = yaml.load("""
+---
+document: modulemd
+version: 2
+data:
+  name: javapackages-tools
+  stream: 201801
+  version: 8000020190628172923
+  context: b07bea58
+  arch: x86_64
+  summary: Tools and macros for Java packaging support
+  description: >-
+    Java Packages Tools is a collection of tools that make it easier to build RPM
+    packages containing software running on Java platform.
+  components:
+    rpms:
+      ant:
+        rationale: "Runtime dependency of ant-contrib"
+        ref: 2eaf095676540e2805ee7e8c7f6f78285c428fdc
+        arches: [aarch64, i686, ppc64le, x86_64]
+  artifacts:
+    rpms:
+    - ant-0:1.10.5-1.module_el8.0.0+30+832da3a1.noarch
+    - ant-0:1.10.5-1.module_el8.0.0+30+832da3a1.src
+""", Loader=yaml.BaseLoader)
+
+ANT_DEFAULTS = yaml.load("""
+data:
+  module: ant
+  profiles:
+    '1.10':
+    - common
+  stream: '1.10'
+document: modulemd-defaults
+version: '1'
+""", Loader=yaml.BaseLoader)
+
+
+PATH_TO_KOJI = '/path/to/koji'
+MODULES_YAML_GZ = 'modules.yaml.gz'
+
+
+class TestModulesYamlParser(TestCase):
+
+    maxDiff = None
+
+    def setUp(self):
+        self.setUpPyfakefs()
+
+    def _prepare_test_data(self):
+        """
+        Create modules.yaml.gz with some test data
+        """
+        os.makedirs(PATH_TO_KOJI)
+        modules_gz_path = os.path.join(PATH_TO_KOJI, MODULES_YAML_GZ)
+        # dump modules into compressed file as in generic repos for rpm
+        io = StringIO()
+        yaml.dump_all([MARIADB_MODULE, JAVAPACKAGES_TOOLS_MODULE, ANT_DEFAULTS], io)
+        with open(os.path.join(PATH_TO_KOJI, MODULES_YAML_GZ), 'wb') as f:
+            f.write(gzip.compress(io.getvalue().encode()))
+        return modules_gz_path
+
+    def test_export_modules(self):
+        modules_gz_path = self._prepare_test_data()
+
+        paths = [open(modules_gz_path, 'rb')]
+        collect_modules(paths, PATH_TO_KOJI)
+
+        # check directory structure matches expected
+        self.assertEqual([MODULES_YAML_GZ, 'modules', 'module_defaults'], os.listdir(PATH_TO_KOJI))
+        self.assertEqual(['mariadb-devel-10.3_1-8010020200108182321.cdc1202b',
+                          'javapackages-tools-201801-8000020190628172923.b07bea58'],
+                         os.listdir(os.path.join(PATH_TO_KOJI, 'modules/x86_64')))
+        self.assertEqual([EMPTY_FILE, 'ant.yaml'],
+                         os.listdir(os.path.join(PATH_TO_KOJI, 'module_defaults')))
+
+        # check that modules were exported
+        self.assertEqual(MARIADB_MODULE, yaml.safe_load(
+            open(os.path.join(PATH_TO_KOJI, 'modules/x86_64', 'mariadb-devel-10.3_1-8010020200108182321.cdc1202b'))))
+        self.assertEqual(JAVAPACKAGES_TOOLS_MODULE, yaml.safe_load(
+            open(os.path.join(PATH_TO_KOJI, 'modules/x86_64', 'javapackages-tools-201801-8000020190628172923.b07bea58'))))
+
+        # check that defaults were copied
+        self.assertEqual(ANT_DEFAULTS, yaml.safe_load(
+            open(os.path.join(PATH_TO_KOJI, 'module_defaults', 'ant.yaml'))))
+
+
+if __name__ == '__main__':
+    unittest.main()

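The directory names asserted in test_export_modules reveal how collect_modules lays out its output: one file per module under modules/<arch>/, named <name>-<stream>-<version>.<context> with dashes in the stream replaced by underscores (10.3-1 becomes 10.3_1), plus per-module defaults under module_defaults/. A sketch of just the naming rule, derived from those assertions rather than from the script itself:

def module_filename(doc):
    # doc is a modulemd mapping as loaded with yaml.BaseLoader (all strings)
    data = doc['data']
    stream = data['stream'].replace('-', '_')  # '10.3-1' -> '10.3_1'
    return '{}-{}-{}.{}'.format(data['name'], stream, data['version'], data['context'])
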
@@ -4,7 +4,7 @@ import copy
 import json
 import os

-import mock
+from unittest import mock

 try:
     import unittest2 as unittest

@@ -0,0 +1,151 @@
+# -*- coding: utf-8 -*-
+import os
+import unittest
+from pathlib import Path
+
+from pyfakefs.fake_filesystem_unittest import TestCase
+
+from pungi.scripts.gather_rpms import search_rpms, copy_rpms, Package
+from productmd.common import parse_nvra
+
+PATH_TO_REPOS = '/path/to/repos'
+MODULES_YAML_GZ = 'modules.yaml.gz'
+
+
+class TestGatherRpms(TestCase):
+    maxDiff = None
+
+    FILES_TO_CREATE = [
+        'powertools/Packages/libvirt-6.0.0-28.module_el'
+        '8.3.0+555+a55c8938.i686.rpm',
+        'powertools/Packages/libgit2-devel-0.26.8-2.el8.x86_64.rpm',
+        'powertools/Packages/xalan-j2-2.7.1-38.module_el'
+        '8.0.0+30+832da3a1.noarch.rpm',
+        'appstream/Packages/bnd-maven-plugin-3.5.0-4.module_el'
+        '8.0.0+30+832da3a1.noarch.rpm',
+        'appstream/Packages/OpenEXR-devel-2.2.0-11.el8.i686.rpm',
+        'appstream/Packages/mingw-binutils-generic-2.30-1.el8.x86_64.rpm',
+        'appstream/Packages/somenonrpm',
+    ]
+
+    def setUp(self):
+        self.setUpPyfakefs()
+
+        os.makedirs(PATH_TO_REPOS)
+
+        for filepath in self.FILES_TO_CREATE:
+            os.makedirs(
+                os.path.join(PATH_TO_REPOS, os.path.dirname(filepath)),
+                exist_ok=True,
+            )
+            open(os.path.join(PATH_TO_REPOS, filepath), 'w').close()
+
+    def test_gather_rpms(self):
+        self.assertEqual(
+            [Package(nvra=parse_nvra('libvirt-6.0.0-28.module_'
+                                     'el8.3.0+555+a55c8938.i686'),
+                     path=Path(
+                         f'{PATH_TO_REPOS}/powertools/Packages/'
+                         f'libvirt-6.0.0-28.module_el'
+                         f'8.3.0+555+a55c8938.i686.rpm'
+                     )),
+             Package(nvra=parse_nvra('libgit2-devel-0.26.8-2.el8.x86_64'),
+                     path=Path(
+                         f'{PATH_TO_REPOS}/powertools/Packages/'
+                         f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
+                     )),
+             Package(nvra=parse_nvra('xalan-j2-2.7.1-38.module_el'
+                                     '8.0.0+30+832da3a1.noarch'),
+                     path=Path(
+                         f'{PATH_TO_REPOS}/powertools/Packages/'
+                         f'xalan-j2-2.7.1-38.module_el'
+                         f'8.0.0+30+832da3a1.noarch.rpm'
+                     )),
+             Package(nvra=parse_nvra('bnd-maven-plugin-3.5.0-4.module_el'
+                                     '8.0.0+30+832da3a1.noarch'),
+                     path=Path(
+                         '/path/to/repos/appstream/Packages/'
+                         'bnd-maven-plugin-3.5.0-4.module_el'
+                         '8.0.0+30+832da3a1.noarch.rpm'
+                     )),
+             Package(nvra=parse_nvra('OpenEXR-devel-2.2.0-11.el8.i686'),
+                     path=Path(
+                         f'{PATH_TO_REPOS}/appstream/Packages/'
+                         f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
+                     )),
+             Package(nvra=parse_nvra('mingw-binutils-generic-'
+                                     '2.30-1.el8.x86_64'),
+                     path=Path(
+                         f'{PATH_TO_REPOS}/appstream/Packages/'
+                         f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
+                     ))
+             ],
+            search_rpms(Path(PATH_TO_REPOS))
+        )
+
+    def test_copy_rpms(self):
+        target_path = Path('/mnt/koji')
+        packages = [
+
+            Package(nvra=parse_nvra('libvirt-6.0.0-28.module_'
+                                    'el8.3.0+555+a55c8938.i686'),
+                    path=Path(
+                        f'{PATH_TO_REPOS}/powertools/Packages/'
+                        f'libvirt-6.0.0-28.module_el'
+                        f'8.3.0+555+a55c8938.i686.rpm'
+                    )),
+            Package(nvra=parse_nvra('libgit2-devel-0.26.8-2.el8.x86_64'),
+                    path=Path(
+                        f'{PATH_TO_REPOS}/powertools/Packages/'
+                        f'libgit2-devel-0.26.8-2.el8.x86_64.rpm'
+                    )),
+            Package(nvra=parse_nvra('xalan-j2-2.7.1-38.module_'
+                                    'el8.0.0+30+832da3a1.noarch'),
+                    path=Path(
+                        f'{PATH_TO_REPOS}/powertools/Packages/'
+                        f'xalan-j2-2.7.1-38.module_el'
+                        f'8.0.0+30+832da3a1.noarch.rpm'
+                    )),
+            Package(nvra=parse_nvra('bnd-maven-plugin-3.5.0-4.module_el'
+                                    '8.0.0+30+832da3a1.noarch'),
+                    path=Path(
+                        '/path/to/repos/appstream/Packages/'
+                        'bnd-maven-plugin-3.5.0-4.module_el'
+                        '8.0.0+30+832da3a1.noarch.rpm'
+                    )),
+            Package(nvra=parse_nvra('OpenEXR-devel-2.2.0-11.el8.i686'),
+                    path=Path(
+                        f'{PATH_TO_REPOS}/appstream/Packages/'
+                        f'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
+                    )),
+            Package(nvra=parse_nvra('mingw-binutils-generic-'
+                                    '2.30-1.el8.x86_64'),
+                    path=Path(
+                        f'{PATH_TO_REPOS}/appstream/Packages/'
+                        f'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
+                    ))
+        ]
+        copy_rpms(packages, target_path, [])
+
+        self.assertCountEqual([
+            'xalan-j2-2.7.1-38.module_el8.0.0+30+832da3a1.noarch.rpm',
+            'bnd-maven-plugin-3.5.0-4.module_el8.0.0+30+832da3a1.noarch.rpm'
+        ], os.listdir(target_path / 'noarch'))
+
+        self.assertCountEqual([
+            'libgit2-devel-0.26.8-2.el8.x86_64.rpm',
+            'mingw-binutils-generic-2.30-1.el8.x86_64.rpm'
+        ], os.listdir(target_path / 'x86_64'))
+
+        self.assertCountEqual([
+            'libvirt-6.0.0-28.module_el8.3.0+555+a55c8938.i686.rpm',
+            'OpenEXR-devel-2.2.0-11.el8.i686.rpm'
+        ], os.listdir(target_path / 'i686'))
+
+        self.assertCountEqual([
+            'i686', 'x86_64', 'noarch'
+        ], os.listdir(target_path))
+
+
+if __name__ == '__main__':
+    unittest.main()

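Read together, the two tests specify search_rpms and copy_rpms almost completely: the former walks the repo tree and returns a Package(nvra, path) for every *.rpm file (the stray somenonrpm file is ignored), the latter sorts packages into per-arch subdirectories of the target. A compact sketch under those assumptions; Package here stands in for the container imported from pungi.scripts.gather_rpms:

import shutil
from pathlib import Path

from productmd.common import parse_nvra


def search_rpms_sketch(top_dir):
    # one Package per *.rpm file found anywhere under top_dir
    return [
        Package(nvra=parse_nvra(path.stem), path=path)
        for path in Path(top_dir).rglob('*.rpm')
    ]


def copy_rpms_sketch(packages, target_top_dir):
    for package in packages:
        # parse_nvra returns a dict; its 'arch' key picks the destination dir
        arch_dir = Path(target_top_dir) / package.nvra['arch']
        arch_dir.mkdir(parents=True, exist_ok=True)
        shutil.copy(package.path, arch_dir)
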
@@ -5,7 +5,7 @@ try:
 except ImportError:
     import unittest

-import mock
+from unittest import mock
 import six

 from pungi.phases.gather.sources.source_module import GatherSourceModule

Some files were not shown because too many files have changed in this diff.