2015-04-24 22:51:28 +00:00
|
|
|
from __future__ import absolute_import
|
2015-02-10 13:19:34 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; version 2 of the License.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Library General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2016-09-21 12:49:13 +00:00
|
|
|
# along with this program; if not, see <https://gnu.org/licenses/>.
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import glob
|
2017-08-28 12:31:09 +00:00
|
|
|
import six
|
2017-09-21 07:29:53 +00:00
|
|
|
from six.moves import shlex_quote
|
2019-10-07 08:37:06 +00:00
|
|
|
from six.moves.urllib.request import urlretrieve
|
|
|
|
from fnmatch import fnmatch
|
2015-02-10 13:19:34 +00:00
|
|
|
|
|
|
|
import kobo.log
|
|
|
|
from kobo.shortcuts import run, force_list
|
2017-05-29 08:44:43 +00:00
|
|
|
from pungi.util import (explode_rpm_package, makedirs, copy_all, temp_dir,
|
|
|
|
retry)
|
2019-10-07 08:37:06 +00:00
|
|
|
from .kojiwrapper import KojiWrapper
|
2015-02-10 13:19:34 +00:00
|
|
|
|
2016-02-17 12:14:49 +00:00
|
|
|
|
2015-02-10 13:19:34 +00:00
|
|
|
class ScmBase(kobo.log.LoggingBase):
    """Common base for SCM wrappers: logging, retried commands and an
    optional post-checkout processing command."""

    def __init__(self, logger=None, command=None, compose=None):
        kobo.log.LoggingBase.__init__(self, logger=logger)
        # Optional shell command executed in the checkout directory after
        # export (see run_process_command).
        self.command = command

    @retry(interval=60, timeout=300, wait_on=RuntimeError)
    def retry_run(self, cmd, **kwargs):
        """Run *cmd* via kobo.shortcuts.run(), retrying on RuntimeError.

        @param cmd - cmd passed to kobo.shortcuts.run()
        @param **kwargs - args passed to kobo.shortcuts.run()
        """
        return run(cmd, **kwargs)

    def run_process_command(self, cwd):
        """Execute the configured post-processing command in *cwd*, if any.

        Raises RuntimeError when the command exits with a non-zero status.
        """
        if not self.command:
            return
        self.log_debug('Running "%s"' % self.command)
        retcode, output = run(
            self.command,
            workdir=cwd,
            can_fail=True,
            stdin_data="",
            universal_newlines=True,
        )
        if retcode != 0:
            self.log_error('Output was: %r' % output)
            raise RuntimeError('%r failed with exit code %s'
                               % (self.command, retcode))
|
|
class FileWrapper(ScmBase):
    """Wrapper "exporting" content straight from the local filesystem.

    ``scm_root`` must be empty; ``scm_dir``/``scm_file`` are glob patterns
    resolved against the current working directory.
    """

    def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
        self.log_debug("Exporting directory %s from current working directory..."
                       % (scm_dir))
        if scm_root:
            raise ValueError("FileWrapper: 'scm_root' should be empty.")
        matched_dirs = glob.glob(scm_dir)
        if not matched_dirs:
            raise RuntimeError('No directories matched, can not export.')
        for directory in matched_dirs:
            copy_all(directory, target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
        if scm_root:
            raise ValueError("FileWrapper: 'scm_root' should be empty.")
        self.log_debug("Exporting file %s from current working directory..."
                       % (scm_file))
        matched_files = glob.glob(scm_file)
        if not matched_files:
            raise RuntimeError('No files matched, can not export.')
        for path in matched_files:
            shutil.copy2(path, os.path.join(target_dir, os.path.basename(path)))
|
|
class CvsWrapper(ScmBase):
    """Wrapper exporting content from a CVS repository (branch defaults to
    HEAD)."""

    def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
        scm_dir = scm_dir.lstrip("/")
        scm_branch = scm_branch or "HEAD"
        with temp_dir() as checkout_dir:
            self.log_debug("Exporting directory %s from CVS %s (branch %s)..."
                           % (scm_dir, scm_root, scm_branch))
            cmd = ["/usr/bin/cvs", "-q", "-d", scm_root,
                   "export", "-r", scm_branch, scm_dir]
            self.retry_run(cmd, workdir=checkout_dir, show_cmd=True)
            copy_all(os.path.join(checkout_dir, scm_dir), target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
        scm_file = scm_file.lstrip("/")
        scm_branch = scm_branch or "HEAD"
        with temp_dir() as checkout_dir:
            destination = os.path.join(target_dir, os.path.basename(scm_file))
            self.log_debug("Exporting file %s from CVS %s (branch %s)..." % (scm_file, scm_root, scm_branch))
            cmd = ["/usr/bin/cvs", "-q", "-d", scm_root,
                   "export", "-r", scm_branch, scm_file]
            self.retry_run(cmd, workdir=checkout_dir, show_cmd=True)

            makedirs(target_dir)
            shutil.copy2(os.path.join(checkout_dir, scm_file), destination)
|
|
class GitWrapper(ScmBase):
    """Wrapper exporting content from a git repository (branch defaults to
    master)."""

    def _clone(self, repo, branch, destdir):
        """Check out a single commit from *repo* into *destdir*.

        git-archive does not accept an arbitrary commit hash and git-clone
        only handles branches, so instead an empty local repository is
        created, the wanted ref is fetched shallowly and checked out.
        Should the shallow fetch fail, a full remote update is done and the
        ref checked out from that.

        The configured post-processing command, if any, runs at the end.
        """
        if "://" not in repo:
            # Plain local paths need an explicit scheme for `git fetch`.
            repo = "file://%s" % repo

        run(["git", "init"], workdir=destdir)
        try:
            run(["git", "fetch", "--depth=1", repo, branch], workdir=destdir)
            run(["git", "checkout", "FETCH_HEAD"], workdir=destdir)
        except RuntimeError:
            # Shallow fetch failed; fall back to a full clone by wiring up
            # a remote in the empty repo, mirroring its content and then
            # checking out the reference we want.
            run(["git", "remote", "add", "origin", repo], workdir=destdir)
            self.retry_run(["git", "remote", "update", "origin"], workdir=destdir)
            run(["git", "checkout", branch], workdir=destdir)

        self.run_process_command(destdir)

    def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
        scm_dir = scm_dir.lstrip("/")
        scm_branch = scm_branch or "master"

        with temp_dir() as checkout_dir:
            self.log_debug("Exporting directory %s from git %s (branch %s)..."
                           % (scm_dir, scm_root, scm_branch))

            self._clone(scm_root, scm_branch, checkout_dir)

            copy_all(os.path.join(checkout_dir, scm_dir), target_dir)

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
        scm_file = scm_file.lstrip("/")
        scm_branch = scm_branch or "master"

        with temp_dir() as checkout_dir:
            destination = os.path.join(target_dir, os.path.basename(scm_file))

            self.log_debug("Exporting file %s from git %s (branch %s)..."
                           % (scm_file, scm_root, scm_branch))

            self._clone(scm_root, scm_branch, checkout_dir)

            makedirs(target_dir)
            shutil.copy2(os.path.join(checkout_dir, scm_file), destination)
|
|
class RpmScmWrapper(ScmBase):
    """Wrapper extracting content from locally available RPM packages.

    ``scm_root`` is one or more glob patterns matching RPM files.
    """

    def _list_rpms(self, pats):
        # Expand one or more glob patterns into matching RPM paths.
        for pattern in force_list(pats):
            for match in glob.glob(pattern):
                yield match

    def export_dir(self, scm_root, scm_dir, target_dir, scm_branch=None):
        scm_dir = scm_dir.lstrip("/")
        for rpm in self._list_rpms(scm_root):
            with temp_dir() as exploded_dir:
                self.log_debug("Extracting directory %s from RPM package %s..." % (scm_dir, rpm))
                explode_rpm_package(rpm, exploded_dir)

                makedirs(target_dir)
                # "dir" copies the whole directory, "dir/" only its content.
                if scm_dir.endswith("/"):
                    copy_all(os.path.join(exploded_dir, scm_dir), target_dir)
                else:
                    run("cp -a %s %s/" % (shlex_quote(os.path.join(exploded_dir, scm_dir)),
                                          shlex_quote(target_dir)))

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
        scm_file = scm_file.lstrip("/")
        for rpm in self._list_rpms(scm_root):
            with temp_dir() as exploded_dir:
                self.log_debug("Exporting file %s from RPM file %s..." % (scm_file, rpm))
                explode_rpm_package(rpm, exploded_dir)

                makedirs(target_dir)
                for src in glob.glob(os.path.join(exploded_dir, scm_file)):
                    shutil.copy2(src, os.path.join(target_dir, os.path.basename(src)))
|
2019-10-07 08:37:06 +00:00
|
|
|
class KojiScmWrapper(ScmBase):
    """Wrapper downloading archives from Koji.

    ``scm_root`` is either a build ID/NVR (when no branch is given) or a
    package name whose latest build in the ``scm_branch`` tag is used.
    Only files can be exported, not directories.
    """

    def __init__(self, *args, **kwargs):
        super(KojiScmWrapper, self).__init__(*args, **kwargs)
        try:
            profile = kwargs["compose"].conf["koji_profile"]
        except (AttributeError, KeyError):
            # AttributeError: compose is None (or missing .conf);
            # KeyError: no "compose" kwarg or no "koji_profile" in config.
            # Either way the wrapper is unusable without a profile.
            raise RuntimeError("Koji profile must be configured")
        wrapper = KojiWrapper(profile)
        # koji_module exposes pathinfo/config helpers, koji_proxy the XMLRPC API.
        self.koji = wrapper.koji_module
        self.proxy = wrapper.koji_proxy

    def export_dir(self, *args, **kwargs):
        raise RuntimeError("Only files can be exported from Koji")

    def export_file(self, scm_root, scm_file, target_dir, scm_branch=None):
        """Download files matching *scm_file* from Koji into *target_dir*."""
        if scm_branch:
            self._get_latest_from_tag(scm_branch, scm_root, scm_file, target_dir)
        else:
            self._get_from_build(scm_root, scm_file, target_dir)

    def _get_latest_from_tag(self, koji_tag, package, file_pattern, target_dir):
        self.log_debug(
            "Exporting file %s from latest Koji package %s in tag %s",
            file_pattern,
            package,
            koji_tag,
        )
        builds = self.proxy.listTagged(koji_tag, package=package, latest=True)
        if len(builds) != 1:
            # BUGFIX: the message must be %-formatted here; passing extra
            # positional args to RuntimeError (logging style) produced an
            # unformatted tuple as the exception message.
            raise RuntimeError("No package %s in tag %s" % (package, koji_tag))
        self._download_build(builds[0], file_pattern, target_dir)

    def _get_from_build(self, build_id, file_pattern, target_dir):
        self.log_debug(
            "Exporting file %s from Koji build %s", file_pattern, build_id
        )
        build = self.proxy.getBuild(build_id)
        self._download_build(build, file_pattern, target_dir)

    def _download_build(self, build, file_pattern, target_dir):
        """Download every archive of *build* whose filename matches
        *file_pattern* (fnmatch-style) into *target_dir*."""
        for archive in self.proxy.listArchives(build["build_id"]):
            filename = archive["filename"]
            if not fnmatch(filename, file_pattern):
                continue
            typedir = self.koji.pathinfo.typedir(build, archive["btype"])
            file_path = os.path.join(typedir, filename)
            # Map the hub-side filesystem path to its download URL.
            url = file_path.replace(self.koji.config.topdir, self.koji.config.topurl)
            target_file = os.path.join(target_dir, filename)
            urlretrieve(url, target_file)
|
2016-02-17 12:14:49 +00:00
|
|
|
def _get_wrapper(scm_type, *args, **kwargs):
    """Instantiate the SCM wrapper class registered for *scm_type*.

    Raises ValueError for an unrecognized type.
    """
    wrappers = {
        "file": FileWrapper,
        "cvs": CvsWrapper,
        "git": GitWrapper,
        "rpm": RpmScmWrapper,
        "koji": KojiScmWrapper,
    }
    if scm_type not in wrappers:
        raise ValueError("Unknown SCM type: %s" % scm_type)
    return wrappers[scm_type](*args, **kwargs)
|
2019-10-07 10:26:22 +00:00
|
|
|
def get_file_from_scm(scm_dict, target_path, compose=None):
    """
    Copy one or more files from source control into ``target_path`` and
    return the list of files created there.

    :param scm_dict:
        Description of the source; either a plain filesystem path (string)
        or a dictionary with keys ``scm`` (version control type: 'git',
        'cvs', 'rpm', 'koji' or 'file'), ``repo`` (repository URL, or
        package name for the 'rpm'/'koji' types), ``file`` (path or list
        of paths to copy), optional ``branch`` (branch/tag to check out)
        and optional ``command`` (post-checkout shell command).

    :param target_path:
        The destination path for the files being copied.

    :param compose:
        If given, its logger is used for any logging performed and its
        configuration may be consulted (e.g. for the Koji profile).

    Example:
        >>> scm_dict = {
        >>>     'scm': 'git',
        >>>     'repo': 'https://pagure.io/pungi.git',
        >>>     'file': ['share/variants.dtd'],
        >>> }
        >>> target_path = '/tmp/path/'
        >>> get_file_from_scm(scm_dict, target_path)
        ['/tmp/path/share/variants.dtd']
    """
    if isinstance(scm_dict, six.string_types):
        # A bare string is shorthand for a local file path.
        scm_type = "file"
        scm_repo = None
        scm_file = os.path.abspath(scm_dict)
        scm_branch = None
        command = None
    else:
        scm_type = scm_dict["scm"]
        scm_repo = scm_dict["repo"]
        scm_file = scm_dict["file"]
        scm_branch = scm_dict.get("branch", None)
        command = scm_dict.get('command')

    logger = compose._logger if compose else None
    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)

    files_copied = []
    for file_pattern in force_list(scm_file):
        # Export into a scratch dir first so copy_all can report exactly
        # which files landed in target_path.
        with temp_dir(prefix="scm_checkout_") as checkout_dir:
            scm.export_file(scm_repo, file_pattern, scm_branch=scm_branch,
                            target_dir=checkout_dir)
            files_copied += copy_all(checkout_dir, target_path)
    return files_copied
|
|
2019-10-07 10:26:22 +00:00
|
|
|
def get_dir_from_scm(scm_dict, target_path, compose=None):
    """
    Copy a directory from source control into ``target_path`` and return
    the list of files created there.

    :param scm_dict:
        Description of the source; either a plain filesystem path (string)
        or a dictionary with keys ``scm`` (version control type: 'git',
        'cvs', 'rpm' or 'file'), ``repo`` (repository URL, or package name
        for the 'rpm' type), ``dir`` (directory to copy), optional
        ``branch`` (branch to check out) and optional ``command``
        (post-checkout shell command).

    :param target_path:
        The destination path for the directory being copied.

    :param compose:
        If given, its logger is used for any logging performed.

    Example:
        >>> scm_dict = {
        >>>     'scm': 'git',
        >>>     'repo': 'https://pagure.io/pungi.git',
        >>>     'dir': 'share',
        >>> }
        >>> target_path = '/tmp/path/'
        >>> get_dir_from_scm(scm_dict, target_path)
        ['/tmp/path/share/variants.dtd', '/tmp/path/share/rawhide-fedora.ks', ...]
    """
    if isinstance(scm_dict, six.string_types):
        # A bare string is shorthand for a local directory path.
        scm_type = "file"
        scm_repo = None
        scm_dir = os.path.abspath(scm_dict)
        scm_branch = None
        command = None
    else:
        scm_type = scm_dict["scm"]
        scm_repo = scm_dict.get("repo", None)
        scm_dir = scm_dict["dir"]
        scm_branch = scm_dict.get("branch", None)
        command = scm_dict.get("command")

    logger = compose._logger if compose else None
    scm = _get_wrapper(scm_type, logger=logger, command=command, compose=compose)

    # Export into a scratch dir first so copy_all can report exactly which
    # files landed in target_path.
    with temp_dir(prefix="scm_checkout_") as checkout_dir:
        scm.export_dir(scm_repo, scm_dir, scm_branch=scm_branch,
                       target_dir=checkout_dir)
        files_copied = copy_all(checkout_dir, target_path)
    return files_copied