Use 2to3 to convert the python2 lorax-composer code to python3
This commit is contained in:
parent
b4096ccb62
commit
9bf93d7154
21
src/pylorax/api/__init__.py
Normal file
21
src/pylorax/api/__init__.py
Normal file
@ -0,0 +1,21 @@
|
||||
#
|
||||
# lorax-composer API server
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
from pylorax.api.crossdomain import crossdomain
|
||||
|
||||
__all__ = ["crossdomain"]
|
||||
|
388
src/pylorax/api/compose.py
Normal file
388
src/pylorax/api/compose.py
Normal file
@ -0,0 +1,388 @@
|
||||
# Copyright (C) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
""" Setup for composing an image
|
||||
|
||||
Adding New Output Types
|
||||
-----------------------
|
||||
|
||||
The new output type must add a kickstart template to ./share/composer/ where the
|
||||
name of the kickstart (without the trailing .ks) matches the entry in compose_args.
|
||||
|
||||
The kickstart should not have any url or repo entries, these will be added at build
|
||||
time. The %packages section should be the last thing, and while it can contain mandatory
|
||||
packages required by the output type, it should not have the trailing %end because the
|
||||
package NEVRAs will be appended to it at build time.
|
||||
|
||||
compose_args should have a name matching the kickstart, and it should set the novirt_install
|
||||
parameters needed to generate the desired output. Other types should be set to False.
|
||||
|
||||
"""
|
||||
import logging
|
||||
log = logging.getLogger("lorax-composer")
|
||||
|
||||
import os
|
||||
from glob import glob
|
||||
from math import ceil
|
||||
import pytoml as toml
|
||||
import shutil
|
||||
from uuid import uuid4
|
||||
|
||||
from pyanaconda.simpleconfig import SimpleConfigFile
|
||||
|
||||
# Use pykickstart to calculate disk image size
|
||||
from pykickstart.parser import KickstartParser
|
||||
from pykickstart.version import makeVersion, RHEL7
|
||||
|
||||
from pylorax.api.projects import projects_depsolve_with_size, dep_nevra
|
||||
from pylorax.api.projects import ProjectsError
|
||||
from pylorax.api.recipes import read_recipe_and_id
|
||||
from pylorax.imgutils import default_image_name
|
||||
from pylorax.sysutils import joinpaths
|
||||
|
||||
|
||||
def repo_to_ks(r, url="url"):
    """Return a kickstart line with the correct args.

    :param r: The yum repository to convert
    :param url: Set to "baseurl" if it is a repo, leave it as "url" for the installation url
    :returns: The kickstart arguments for the repo (e.g. --mirrorlist=... --proxy=...)
    :rtype: str
    :raises RuntimeError: If no usable url can be found for the repo
    """
    args = []
    if url == "url":
        # url is passed to Anaconda on the cmdline with --repo, so it cannot support a mirror
        # If a mirror is setup yum will return the list of mirrors in .urls
        # So just use the first one.
        if not r.urls:
            raise RuntimeError("Cannot find a base url for %s" % r.name)
        args.append('--%s="%s" ' % (url, r.urls[0]))
    elif r.metalink:
        # XXX Total Hack
        # RHEL7 kickstart doesn't support metalink. If the url has 'metalink' in it,
        # rewrite it as 'mirrorlist'
        if "metalink" in r.metalink:
            log.info("RHEL7 does not support metalink, translating to mirrorlist")
            args.append('--mirrorlist="%s" ' % r.metalink.replace("metalink", "mirrorlist"))
        else:
            log.error("Could not convert metalink to mirrorlist. %s", r.metalink)
            raise RuntimeError("Cannot convert metalink to mirrorlist: %s" % r.metalink)
    elif r.mirrorlist:
        args.append('--mirrorlist="%s" ' % r.mirrorlist)
    elif r.baseurl:
        args.append('--%s="%s" ' % (url, r.baseurl[0]))
    else:
        raise RuntimeError("Repo has no baseurl or mirror")

    if r.proxy:
        args.append('--proxy="%s" ' % r.proxy)

    if not r.sslverify:
        args.append('--noverifyssl')

    return "".join(args)
|
||||
|
||||
def start_build(cfg, yumlock, gitlock, branch, recipe_name, compose_type, test_mode=0):
    """ Start the build

    :param cfg: Configuration object
    :type cfg: ComposerConfig
    :param yumlock: Lock and YumBase for depsolving
    :type yumlock: YumLock
    :param gitlock: Lock and Git repo for reading the recipe
    :type gitlock: GitLock
    :param branch: The git branch to read the recipe from
    :type branch: str
    :param recipe_name: The recipe to build
    :type recipe_name: str
    :param compose_type: The type of output to create from the recipe
    :type compose_type: str
    :param test_mode: If > 0, write a TEST file into the results directory
    :type test_mode: int
    :returns: Unique ID for the build that can be used to track its status
    :rtype: str
    :raises RuntimeError: If the compose type is invalid, depsolving fails, or no repos are enabled
    """
    share_dir = cfg.get("composer", "share_dir")
    lib_dir = cfg.get("composer", "lib_dir")

    # Make sure compose_type is valid
    if compose_type not in compose_types(share_dir):
        raise RuntimeError("Invalid compose type (%s), must be one of %s" % (compose_type, compose_types(share_dir)))

    with gitlock.lock:
        (commit_id, recipe) = read_recipe_and_id(gitlock.repo, branch, recipe_name)

    # Combine modules and packages and depsolve the list
    # TODO include the version/glob in the depsolving
    module_names = [m["name"] for m in recipe["modules"] or []]
    package_names = [p["name"] for p in recipe["packages"] or []]
    projects = sorted(set(module_names+package_names), key=lambda n: n.lower())
    deps = []
    try:
        with yumlock.lock:
            (installed_size, deps) = projects_depsolve_with_size(yumlock.yb, projects, with_core=False)
    except ProjectsError as e:
        log.error("start_build depsolve: %s", str(e))
        raise RuntimeError("Problem depsolving %s: %s" % (recipe["name"], str(e)))

    # Read the kickstart template for this type
    ks_template_path = joinpaths(share_dir, "composer", compose_type) + ".ks"
    ks_template = open(ks_template_path, "r").read()

    # How much space will the packages in the default template take?
    ks_version = makeVersion(RHEL7)
    ks = KickstartParser(ks_version, errorsAreFatal=False, missingIncludeIsFatal=False)
    # The template's %packages section has no %end (NEVRAs are appended later), add one
    ks.readKickstartFromString(ks_template+"\n%end\n")
    try:
        with yumlock.lock:
            (template_size, _) = projects_depsolve_with_size(yumlock.yb, ks.handler.packages.packageList,
                                                             with_core=not ks.handler.packages.nocore)
    except ProjectsError as e:
        log.error("start_build depsolve: %s", str(e))
        raise RuntimeError("Problem depsolving %s: %s" % (recipe["name"], str(e)))
    log.debug("installed_size = %d, template_size=%d", installed_size, template_size)

    # Minimum LMC disk size is 1GiB, and anaconda bumps the estimated size up by 35% (which doesn't always work).
    installed_size = max(1024**3, int((installed_size+template_size) * 1.4))
    log.debug("/ partition size = %d", installed_size)

    # Create the results directory
    build_id = str(uuid4())
    results_dir = joinpaths(lib_dir, "results", build_id)
    os.makedirs(results_dir)

    # Write the recipe commit hash
    commit_path = joinpaths(results_dir, "COMMIT")
    with open(commit_path, "w") as f:
        f.write(commit_id)

    # Write the original recipe
    recipe_path = joinpaths(results_dir, "blueprint.toml")
    with open(recipe_path, "w") as f:
        f.write(recipe.toml())

    # Write the frozen recipe
    frozen_recipe = recipe.freeze(deps)
    recipe_path = joinpaths(results_dir, "frozen.toml")
    with open(recipe_path, "w") as f:
        f.write(frozen_recipe.toml())

    # Write out the dependencies to the results dir
    deps_path = joinpaths(results_dir, "deps.toml")
    with open(deps_path, "w") as f:
        # BUGFIX: toml.dumps() returns str in Python 3; writing .encode("UTF-8")
        # bytes to a text-mode file raises TypeError (2to3 leftover)
        f.write(toml.dumps({"packages":deps}))

    # Save a copy of the original kickstart
    shutil.copy(ks_template_path, results_dir)

    # Create the final kickstart with repos and package list
    ks_path = joinpaths(results_dir, "final-kickstart.ks")
    with open(ks_path, "w") as f:
        with yumlock.lock:
            repos = yumlock.yb.repos.listEnabled()
            if not repos:
                raise RuntimeError("No enabled repos, canceling build.")

            ks_url = repo_to_ks(repos[0], "url")
            log.debug("url = %s", ks_url)
            f.write('url %s\n' % ks_url)
            for idx, r in enumerate(repos[1:]):
                ks_repo = repo_to_ks(r, "baseurl")
                log.debug("repo composer-%s = %s", idx, ks_repo)
                f.write('repo --name="composer-%s" %s\n' % (idx, ks_repo))

        # Write the root partition and it's size in MB (rounded up)
        f.write('part / --fstype="ext4" --size=%d\n' % ceil(installed_size / 1024**2))

        f.write(ks_template)

        # Append the depsolved package NEVRAs to the template's open %packages section
        for d in deps:
            f.write(dep_nevra(d)+"\n")

        f.write("%end\n")

    # Setup the config to pass to novirt_install
    log_dir = joinpaths(results_dir, "logs/")
    cfg_args = compose_args(compose_type)

    # Get the title, project, and release version from the host
    if not os.path.exists("/etc/os-release"):
        log.error("/etc/os-release is missing, cannot determine product or release version")
    os_release = SimpleConfigFile("/etc/os-release")
    os_release.read()

    log.debug("os_release = %s", os_release)

    cfg_args["title"] = os_release.get("PRETTY_NAME")
    cfg_args["project"] = os_release.get("NAME")
    cfg_args["releasever"] = os_release.get("VERSION_ID")
    cfg_args["volid"] = ""

    cfg_args.update({
        "compression": "xz",
        "compress_args": [],
        "ks": [ks_path],
        "project": "Red Hat Enterprise Linux",
        "releasever": "7",
        "logfile": log_dir
    })
    with open(joinpaths(results_dir, "config.toml"), "w") as f:
        # BUGFIX: same as deps.toml above — no .encode() for a text-mode file
        f.write(toml.dumps(cfg_args))

    # Set the initial status
    open(joinpaths(results_dir, "STATUS"), "w").write("WAITING")

    # Set the test mode, if requested
    if test_mode > 0:
        open(joinpaths(results_dir, "TEST"), "w").write("%s" % test_mode)

    # The queue monitor picks up new builds from queue/new/
    log.info("Adding %s (%s %s) to compose queue", build_id, recipe["name"], compose_type)
    os.symlink(results_dir, joinpaths(lib_dir, "queue/new/", build_id))

    return build_id
|
||||
|
||||
# Supported output types
|
||||
def compose_types(share_dir):
    r""" Returns a list of the supported output types

    The output types come from the kickstart names in /usr/share/lorax/composer/\*ks

    :param share_dir: Directory that holds the composer/ kickstart templates
    :type share_dir: str
    :returns: Sorted list of output type names (kickstart basenames without .ks)
    :rtype: list of str
    """
    ks_files = glob(joinpaths(share_dir, "composer/*.ks"))
    # Strip the directory and the trailing ".ks" from each template name
    return sorted(os.path.basename(ks_file)[:-3] for ks_file in ks_files)
|
||||
|
||||
def compose_args(compose_type):
    """ Returns the settings to pass to novirt_install for the compose type

    :param compose_type: The type of compose to create, from `compose_types()`
    :type compose_type: str
    :returns: Options matching the ArgumentParser options for livemedia-creator
    :rtype: dict

    This will return a dict of options that match the ArgumentParser options for livemedia-creator.
    These are the ones the define the type of output, it's filename, etc.
    Other options will be filled in by `make_compose()`
    """
    # Values shared by every output type. Each entry in overrides below only
    # lists the settings that differ for that type.
    defaults = {"make_iso": False,
                "make_disk": False,
                "make_fsimage": False,
                "make_appliance": False,
                "make_ami": False,
                "make_tar": False,
                "make_pxe_live": False,
                "make_ostree_live": False,
                "ostree": False,
                "live_rootfs_keep_size": False,
                "live_rootfs_size": 0,
                "qcow2": False,
                "qcow2_args": [],
                "image_only": True,
                "app_name": None,
                "app_template": None,
                "app_file": None}
    # NOTE: "tar" intentionally has no "fs_label" entry, matching the
    # options livemedia-creator expects for that output.
    overrides = {"tar": {"make_tar": True,
                         "image_name": default_image_name("xz", "root.tar")},
                 "live-iso": {"make_iso": True,
                              "image_name": "live.iso",
                              "fs_label": "Anaconda",  # Live booting may expect this to be 'Anaconda'
                              "image_only": False},
                 "partitioned-disk": {"make_disk": True,
                                      "image_name": "disk.img",
                                      "fs_label": ""},
                 "qcow2": {"make_disk": True,
                           "qcow2": True,
                           "image_name": "disk.qcow2",
                           "fs_label": ""},
                 "ext4-filesystem": {"make_fsimage": True,
                                     "image_name": "filesystem.img",
                                     "fs_label": ""}}
    args = dict(defaults)
    args.update(overrides[compose_type])
    return args
|
||||
|
||||
def move_compose_results(cfg, results_dir):
    """Move the final image to the results_dir and cleanup the unneeded compose files

    :param cfg: Build configuration
    :type cfg: DataHolder
    :param results_dir: Directory to put the results into
    :type results_dir: str
    """
    # Pick the source and destination based on the output type that was built
    dest = None
    if cfg["make_tar"]:
        src, dest = joinpaths(cfg["result_dir"], cfg["image_name"]), results_dir
    elif cfg["make_iso"]:
        # Output from live iso is always a boot.iso under images/, move and rename it
        src, dest = joinpaths(cfg["result_dir"], "images/boot.iso"), joinpaths(results_dir, cfg["image_name"])
    elif cfg["make_disk"] or cfg["make_fsimage"]:
        src, dest = joinpaths(cfg["result_dir"], cfg["image_name"]), joinpaths(results_dir, cfg["image_name"])
    if dest is not None:
        shutil.move(src, dest)

    # Cleanup the compose directory, but only if it looks like a compose directory
    if os.path.basename(cfg["result_dir"]) == "compose":
        shutil.rmtree(cfg["result_dir"])
    else:
        log.error("Incorrect compose directory, not cleaning up")
|
107
src/pylorax/api/config.py
Normal file
107
src/pylorax/api/config.py
Normal file
@ -0,0 +1,107 @@
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import configparser
|
||||
import grp
|
||||
import os
|
||||
|
||||
from pylorax.sysutils import joinpaths
|
||||
|
||||
class ComposerConfig(configparser.ConfigParser):
    """ConfigParser that can return a default value for missing options.

    NOTE: the 2to3 conversion left this subclassing SafeConfigParser, which has
    been a deprecated alias of ConfigParser since Python 3.2 and was removed in
    Python 3.12 — use ConfigParser directly (identical behavior).
    """
    def get_default(self, section, option, default):
        """Return the option's value, or default if the section/option is missing.

        :param section: Section name to look in
        :type section: str
        :param option: Option name to read
        :type option: str
        :param default: Value to return when the option cannot be read
        :returns: The option value, or default on any configparser error
        """
        try:
            return self.get(section, option)
        except configparser.Error:
            return default
|
||||
|
||||
|
||||
def configure(conf_file="/etc/lorax/composer.conf", root_dir="/", test_config=False):
    """lorax-composer configuration

    :param conf_file: Path to the config file overriding the default settings
    :type conf_file: str
    :param root_dir: Directory to prepend to paths, defaults to /
    :type root_dir: str
    :param test_config: Set to True to skip reading conf_file
    :type test_config: bool
    :returns: The configuration, defaults overridden by conf_file when present
    :rtype: ComposerConfig
    """
    conf = ComposerConfig()

    # set defaults — every path is resolved relative to root_dir
    conf.add_section("composer")
    for option, default_path in (("share_dir", "/usr/share/lorax/"),
                                 ("lib_dir", "/var/lib/lorax/composer/"),
                                 ("yum_conf", "/var/tmp/composer/yum.conf"),
                                 ("yum_root", "/var/tmp/composer/yum/root/"),
                                 ("repo_dir", "/var/tmp/composer/repos.d/"),
                                 ("cache_dir", "/var/tmp/composer/cache/")):
        conf.set("composer", option, os.path.realpath(joinpaths(root_dir, default_path)))

    conf.add_section("users")
    conf.set("users", "root", "1")

    # Enable all available repo files by default
    conf.add_section("repos")
    conf.set("repos", "use_system_repos", "1")
    conf.set("repos", "enabled", "*")

    # read the config file, letting it override the defaults above
    if not test_config and os.path.isfile(conf_file):
        conf.read(conf_file)

    return conf
|
||||
|
||||
def make_yum_dirs(conf):
    """Make any missing yum directories

    :param conf: The configuration to use
    :type conf: ComposerConfig
    :returns: None
    """
    # Each option holds a path whose parent directory must exist
    for option in ("yum_conf", "repo_dir", "cache_dir", "yum_root"):
        parent = os.path.dirname(conf.get("composer", option))
        if not os.path.exists(parent):
            os.makedirs(parent)
|
||||
|
||||
def make_queue_dirs(conf, gid):
    """Make any missing queue directories

    :param conf: The configuration to use
    :type conf: ComposerConfig
    :param gid: Group ID that has access to the queue directories
    :type gid: int
    :returns: list of errors
    :rtype: list of str
    """
    errors = []
    lib_dir = conf.get("composer", "lib_dir")
    for subdir in ("queue/run", "queue/new", "results"):
        path = joinpaths(lib_dir, subdir)
        if os.path.exists(path):
            # Directory already exists — check its permissions and ownership
            info = os.stat(path)
            if info.st_mode & 0o006 != 0:
                errors.append("Incorrect permissions on %s, no o+rw permissions are allowed." % path)

            if info.st_gid != gid or info.st_uid != 0:
                gr_name = grp.getgrgid(gid).gr_name
                errors.append("%s should be owned by root:%s" % (path, gr_name))
        else:
            # Create it as root:gid mode 0771, bypassing the process umask
            orig_umask = os.umask(0)
            os.makedirs(path, 0o771)
            os.chown(path, 0, gid)
            os.umask(orig_umask)

    return errors
|
64
src/pylorax/api/crossdomain.py
Normal file
64
src/pylorax/api/crossdomain.py
Normal file
@ -0,0 +1,64 @@
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
# crossdomain decorator from - http://flask.pocoo.org/snippets/56/
|
||||
from datetime import timedelta
|
||||
from flask import make_response, request, current_app
|
||||
from functools import update_wrapper
|
||||
|
||||
|
||||
def crossdomain(origin, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True):
    """Decorator that adds CORS (Access-Control-*) headers to a flask view.

    Based on the snippet from http://flask.pocoo.org/snippets/56/

    :param origin: Allowed origin(s) — a single string or a list of strings
    :param methods: Allowed methods; defaults to whatever flask advertises for the view
    :param headers: Allowed request headers — a single string or a list of strings
    :param max_age: Preflight cache lifetime, in seconds or as a timedelta
    :param attach_to_all: When True, add the headers to every response, not just OPTIONS
    :param automatic_options: When True, answer OPTIONS with flask's default response
    """
    # Normalize the arguments once, at decoration time
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, str):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, list):
        origin = [origin]
    if isinstance(max_age, timedelta):
        max_age = int(max_age.total_seconds())

    def allowed_methods():
        # Fall back to the methods flask's default OPTIONS response would list
        if methods is not None:
            return methods
        return current_app.make_default_options_response().headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp

            hdrs = resp.headers

            hdrs.extend([("Access-Control-Allow-Origin", orig) for orig in origin])
            hdrs['Access-Control-Allow-Methods'] = allowed_methods()
            hdrs['Access-Control-Max-Age'] = str(max_age)
            if headers is not None:
                hdrs['Access-Control-Allow-Headers'] = headers
            return resp

        # Handle OPTIONS ourselves instead of letting flask do it
        f.provide_automatic_options = False
        f.required_methods = ['OPTIONS']
        return update_wrapper(wrapped_function, f)
    return decorator
|
311
src/pylorax/api/projects.py
Normal file
311
src/pylorax/api/projects.py
Normal file
@ -0,0 +1,311 @@
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import logging
|
||||
log = logging.getLogger("lorax-composer")
|
||||
|
||||
import time
|
||||
|
||||
from yum.Errors import YumBaseError
|
||||
|
||||
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
|
||||
|
||||
|
||||
class ProjectsError(Exception):
    """Raised when listing, querying, or depsolving projects fails."""
    pass
|
||||
|
||||
|
||||
def api_time(t):
    """Convert time since epoch to a string

    :param t: Seconds since epoch
    :type t: int
    :returns: Time string in TIME_FORMAT (local time)
    :rtype: str
    """
    local = time.localtime(t)
    return time.strftime(TIME_FORMAT, local)
|
||||
|
||||
|
||||
def api_changelog(changelog):
    """Convert the changelog to a string

    :param changelog: A list of time, author, string tuples.
    :type changelog: tuple
    :returns: The most recent changelog text or ""
    :rtype: str

    This returns only the most recent changelog entry.
    """
    try:
        return changelog[0][2]
    except IndexError:
        # Empty changelog (or a too-short entry) — no text available
        return ""
|
||||
|
||||
|
||||
def yaps_to_project(yaps):
    """Extract the details from a YumAvailablePackageSqlite object

    :param yaps: Yum object with package details
    :type yaps: YumAvailablePackageSqlite
    :returns: A dict with the name, summary, description, and url.
    :rtype: dict

    upstream_vcs is hard-coded to UPSTREAM_VCS
    """
    project = dict(name=yaps.name,
                   summary=yaps.summary,
                   description=yaps.description,
                   homepage=yaps.url)
    project["upstream_vcs"] = "UPSTREAM_VCS"
    return project
|
||||
|
||||
|
||||
def yaps_to_project_info(yaps):
    """Extract the details from a YumAvailablePackageSqlite object

    :param yaps: Yum object with package details
    :type yaps: YumAvailablePackageSqlite
    :returns: A dict with the project details, as well as epoch, release, arch, build_time, changelog, ...
    :rtype: dict

    metadata entries are hard-coded to {}
    """
    # The single build entry for this package version
    build = {"epoch": int(yaps.epoch),
             "release": yaps.release,
             "arch": yaps.arch,
             "build_time": api_time(yaps.buildtime),
             "changelog": api_changelog(yaps.returnChangelog()),
             "build_config_ref": "BUILD_CONFIG_REF",
             "build_env_ref": "BUILD_ENV_REF",
             "metadata": {},
             "source": {"license": yaps.license,
                        "version": yaps.version,
                        "source_ref": "SOURCE_REF",
                        "metadata": {}}}

    # Same top-level fields as yaps_to_project(), plus the build list
    info = yaps_to_project(yaps)
    info["builds"] = [build]
    return info
|
||||
|
||||
|
||||
def tm_to_dep(tm):
    """Extract the info from a TransactionMember object

    :param tm: A Yum transaction object
    :type tm: TransactionMember
    :returns: A dict with name, epoch, version, release, arch
    :rtype: dict
    """
    dep = {attr: getattr(tm, attr) for attr in ("name", "version", "release", "arch")}
    # epoch is normalized to an int
    dep["epoch"] = int(tm.epoch)
    return dep
|
||||
|
||||
|
||||
def yaps_to_module(yaps):
    """Extract the name from a YumAvailablePackageSqlite object

    :param yaps: Yum object with package details
    :type yaps: YumAvailablePackageSqlite
    :returns: A dict with name, and group_type
    :rtype: dict

    group_type is hard-coded to "rpm"
    """
    return dict(name=yaps.name, group_type="rpm")
|
||||
|
||||
|
||||
def dep_evra(dep):
    """Return the epoch:version-release.arch for the dep

    :param dep: dependency dict
    :type dep: dict
    :returns: epoch:version-release.arch (the epoch prefix is dropped when it is 0)
    :rtype: str
    """
    vra = "%s-%s.%s" % (dep["version"], dep["release"], dep["arch"])
    if dep["epoch"] == 0:
        return vra
    return "%s:%s" % (dep["epoch"], vra)
|
||||
|
||||
def dep_nevra(dep):
    """Return the name-epoch:version-release.arch"""
    return "%s-%s" % (dep["name"], dep_evra(dep))
|
||||
|
||||
def projects_list(yb):
    """Return a list of projects

    :param yb: yum base object
    :type yb: YumBase
    :returns: List of project info dicts with name, summary, description, homepage, upstream_vcs
    :rtype: list of dicts
    :raises ProjectsError: if yum fails to list the available packages
    """
    try:
        ybl = yb.doPackageLists(pkgnarrow="available", showdups=False)
    except YumBaseError as e:
        raise ProjectsError("There was a problem listing projects: %s" % str(e))
    finally:
        # Always release the rpm database
        yb.closeRpmDB()
    projects = [yaps_to_project(yaps) for yaps in ybl.available]
    return sorted(projects, key=lambda proj: proj["name"].lower())
|
||||
|
||||
|
||||
def projects_info(yb, project_names):
    """Return details about specific projects

    :param yb: yum base object
    :type yb: YumBase
    :param project_names: List of names of projects to get info about
    :type project_names: str
    :returns: List of project info dicts with yaps_to_project as well as epoch, version, release, etc.
    :rtype: list of dicts
    :raises ProjectsError: if yum fails while gathering the package info
    """
    try:
        ybl = yb.doPackageLists(pkgnarrow="available", patterns=project_names, showdups=False)
    except YumBaseError as e:
        raise ProjectsError("There was a problem with info for %s: %s" % (project_names, str(e)))
    finally:
        # Always release the rpm database
        yb.closeRpmDB()
    infos = [yaps_to_project_info(yaps) for yaps in ybl.available]
    return sorted(infos, key=lambda proj: proj["name"].lower())
|
||||
|
||||
|
||||
def projects_depsolve(yb, project_names):
    """Return the dependencies for a list of projects

    :param yb: yum base object
    :type yb: YumBase
    :param project_names: The projects to find the dependencies for
    :type project_names: List of Strings
    :returns: NEVRA's of the project and its dependencies
    :rtype: list of dicts
    :raises ProjectsError: if the transaction fails or yum raises an error
    """
    try:
        # This resets the transaction
        yb.closeRpmDB()
        for p in project_names:
            yb.install(pattern=p)
        (rc, msg) = yb.buildTransaction()
        # any return code other than 0, 1, or 2 is treated as a depsolve failure
        if rc not in [0, 1, 2]:
            raise ProjectsError("There was a problem depsolving %s: %s" % (project_names, msg))
        yb.tsInfo.makelists()
        # Collect the explicitly installed packages plus their pulled-in deps
        deps = sorted(map(tm_to_dep, yb.tsInfo.installed + yb.tsInfo.depinstalled), key=lambda p: p["name"].lower())
    except YumBaseError as e:
        raise ProjectsError("There was a problem depsolving %s: %s" % (project_names, str(e)))
    finally:
        # Always release the rpm database, even on failure
        yb.closeRpmDB()
    return deps
|
||||
|
||||
def estimate_size(packages, block_size=4096):
    """Estimate the installed size of a package list

    :param packages: The packages to be installed
    :type packages: list of TransactionMember objects
    :param block_size: The block size to use for rounding up file sizes.
    :type block_size: int
    :returns: The estimated size of installed packages
    :rtype: int

    Estimating actual requirements is difficult without the actual file sizes, which
    yum doesn't provide access to. So use the file count and block size to estimate
    a minimum size for each package.
    """
    # One block per file, plus the package's reported installed size
    return sum(len(p.po.filelist) * block_size + p.po.installedsize for p in packages)
|
||||
|
||||
def projects_depsolve_with_size(yb, project_names, with_core=True):
    """Depsolve a list of projects and return the installed size with the dependencies

    :param yb: yum base object
    :type yb: YumBase
    :param project_names: The projects to find the dependencies for
    :type project_names: List of Strings
    :param with_core: Also pull in the "core" group when True
    :type with_core: bool
    :returns: installed size and a list of NEVRA's of the project and its dependencies
    :rtype: tuple of (int, list of dicts)
    :raises: ProjectsError if the transaction cannot be built
    """
    try:
        # Start from a clean transaction state
        yb.closeRpmDB()
        for name in project_names:
            yb.install(pattern=name)
        if with_core:
            yb.selectGroup("core", group_package_types=['mandatory', 'default', 'optional'])
        rc, msg = yb.buildTransaction()
        # rc of 0, 1, or 2 is treated as success here
        if rc not in (0, 1, 2):
            raise ProjectsError("There was a problem depsolving %s: %s" % (project_names, msg))
        yb.tsInfo.makelists()
        members = yb.tsInfo.installed + yb.tsInfo.depinstalled
        installed_size = estimate_size(members)
        deps = sorted((tm_to_dep(tm) for tm in members), key=lambda d: d["name"].lower())
    except YumBaseError as e:
        raise ProjectsError("There was a problem depsolving %s: %s" % (project_names, str(e)))
    finally:
        # Always release the rpmdb, even on failure
        yb.closeRpmDB()
    return (installed_size, deps)
||||
|
||||
def modules_list(yb, module_names):
    """Return a list of modules matching the given names

    :param yb: yum base object
    :type yb: YumBase
    :param module_names: Names (or patterns) of the modules to list
    :type module_names: list of str
    :returns: List of module information, sorted by name
    :rtype: list of dicts
    :raises: ProjectsError if the package listing fails

    Modules don't exist in RHEL7 so this only returns projects
    and sets the type to "rpm"
    """
    try:
        ybl = yb.doPackageLists(pkgnarrow="available", patterns=module_names, showdups=False)
    except YumBaseError as e:
        raise ProjectsError("There was a problem listing modules: %s" % str(e))
    finally:
        # Always release the rpmdb, even on failure
        yb.closeRpmDB()
    return sorted((yaps_to_module(a) for a in ybl.available), key=lambda m: m["name"].lower())
||||
|
||||
|
||||
def modules_info(yb, module_names):
    """Return details about modules, including their dependencies

    :param yb: yum base object
    :type yb: YumBase
    :param module_names: Names of the modules to get info about
    :type module_names: str
    :returns: List of dicts with module details and dependencies.
    :rtype: list of dicts
    :raises: ProjectsError if the lookup fails
    """
    try:
        # Look up the project details for each requested module name
        ybl = yb.doPackageLists(pkgnarrow="available", patterns=module_names, showdups=False)
    except YumBaseError as e:
        raise ProjectsError("There was a problem with info for %s: %s" % (module_names, str(e)))
    finally:
        yb.closeRpmDB()

    modules = sorted((yaps_to_project(a) for a in ybl.available), key=lambda m: m["name"].lower())
    # Attach each module's own depsolved dependency list
    for module in modules:
        module["dependencies"] = projects_depsolve(yb, [module["name"]])
    return modules
|
604
src/pylorax/api/queue.py
Normal file
604
src/pylorax/api/queue.py
Normal file
@ -0,0 +1,604 @@
|
||||
# Copyright (C) 2018 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
""" Functions to monitor compose queue and run anaconda"""
|
||||
import logging
|
||||
log = logging.getLogger("pylorax")
|
||||
|
||||
import os
|
||||
import grp
|
||||
from glob import glob
|
||||
import multiprocessing as mp
|
||||
import pytoml as toml
|
||||
import pwd
|
||||
import shutil
|
||||
import subprocess
|
||||
from subprocess import Popen, PIPE
|
||||
import time
|
||||
|
||||
from pylorax.api.compose import move_compose_results
|
||||
from pylorax.api.recipes import recipe_from_file
|
||||
from pylorax.base import DataHolder
|
||||
from pylorax.creator import run_creator
|
||||
from pylorax.sysutils import joinpaths
|
||||
|
||||
def start_queue_monitor(cfg, uid, gid):
    """Start the queue monitor as a mp process

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uid: User ID that owns the queue
    :type uid: int
    :param gid: Group ID that owns the queue
    :type gid: int
    :returns: None
    """
    monitor_cfg = DataHolder(composer_dir=cfg.get("composer", "lib_dir"),
                             share_dir=cfg.get("composer", "share_dir"),
                             uid=uid, gid=gid)
    # Run the monitor loop in a daemon process so it exits with the server
    proc = mp.Process(target=monitor, args=(monitor_cfg,))
    proc.daemon = True
    proc.start()
|
||||
|
||||
def monitor(cfg):
    """Monitor the queue for new compose requests

    :param cfg: Configuration settings
    :type cfg: DataHolder
    :returns: Does not return

    The queue has 2 subdirectories, new and run. When a compose is ready to be run
    a symlink to the uniquely named results directory should be placed in ./queue/new/

    When the it is ready to be run (it is checked every 30 seconds or after a previous
    compose is finished) the symlink will be moved into ./queue/run/ and a STATUS file
    will be created in the results directory.

    STATUS can contain one of: RUNNING, FINISHED, FAILED

    If the system is restarted while a compose is running it will move any old symlinks
    from ./queue/run/ to ./queue/new/ and rerun them.
    """
    def queue_sort(uuid):
        """Sort the queue entries by their mtime, not their names"""
        return os.stat(joinpaths(cfg.composer_dir, "queue/new", uuid)).st_mtime

    def set_status(dst, status):
        """Write the STATUS file, closing it promptly so the new state is on disk"""
        # Was open(...).write(...) which leaked the file handle
        with open(joinpaths(dst, "STATUS"), "w") as f:
            f.write(status)

    # Move any symlinks in the run queue back to the new queue
    for link in os.listdir(joinpaths(cfg.composer_dir, "queue/run")):
        src = joinpaths(cfg.composer_dir, "queue/run", link)
        dst = joinpaths(cfg.composer_dir, "queue/new", link)
        os.rename(src, dst)
        log.debug("Moved unfinished compose %s back to new state", src)

    while True:
        uuids = sorted(os.listdir(joinpaths(cfg.composer_dir, "queue/new")), key=queue_sort)

        # Pick the oldest and move it into ./run/
        if not uuids:
            # No composes left to process, sleep for a bit
            time.sleep(30)
        else:
            src = joinpaths(cfg.composer_dir, "queue/new", uuids[0])
            dst = joinpaths(cfg.composer_dir, "queue/run", uuids[0])
            try:
                os.rename(src, dst)
            except OSError:
                # The symlink may vanish if uuid_cancel() has been called
                continue

            log.info("Starting new compose: %s", dst)
            set_status(dst, "RUNNING\n")

            try:
                make_compose(cfg, os.path.realpath(dst))
                log.info("Finished building %s, results are in %s", dst, os.path.realpath(dst))
                set_status(dst, "FINISHED\n")
            except Exception:
                import traceback
                log.error("traceback: %s", traceback.format_exc())

                # TODO - Write the error message to an ERROR-LOG file to include with the status
                # log.error("Error running compose: %s", e)
                set_status(dst, "FAILED\n")

            # Remove the run-queue symlink; the results directory remains
            os.unlink(dst)
|
||||
|
||||
def make_compose(cfg, results_dir):
    """Run anaconda with the final-kickstart.ks from results_dir

    :param cfg: Configuration settings
    :type cfg: DataHolder
    :param results_dir: The directory containing the metadata and results for the build
    :type results_dir: str
    :returns: Nothing
    :raises: May raise various exceptions

    This takes the final-kickstart.ks, and the settings in config.toml and runs Anaconda
    in no-virt mode (directly on the host operating system). Exceptions should be caught
    at the higer level.

    If there is a failure, the build artifacts will be cleaned up, and any logs will be
    moved into logs/anaconda/ and their ownership will be set to the user from the cfg
    object.
    """

    # Check on the ks's presence
    ks_path = joinpaths(results_dir, "final-kickstart.ks")
    if not os.path.exists(ks_path):
        raise RuntimeError("Missing kickstart file at %s" % ks_path)

    # The anaconda logs are copied into ./anaconda/ in this directory
    log_dir = joinpaths(results_dir, "logs/")
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # Load the compose configuration
    cfg_path = joinpaths(results_dir, "config.toml")
    if not os.path.exists(cfg_path):
        raise RuntimeError("Missing config.toml for %s" % results_dir)
    # Close the file promptly instead of relying on garbage collection
    with open(cfg_path, "r") as f:
        cfg_dict = toml.loads(f.read())

    # The keys in cfg_dict correspond to the arguments setup in livemedia-creator
    # keys that define what to build should be setup in compose_args, and keys with
    # defaults should be setup here.

    # Make sure that image_name contains no path components
    cfg_dict["image_name"] = os.path.basename(cfg_dict["image_name"])

    # Only support novirt installation, set some other defaults
    cfg_dict["no_virt"] = True
    cfg_dict["disk_image"] = None
    cfg_dict["fs_image"] = None
    cfg_dict["keep_image"] = False
    cfg_dict["domacboot"] = False
    cfg_dict["anaconda_args"] = ""
    cfg_dict["proxy"] = ""
    cfg_dict["armplatform"] = ""
    cfg_dict["squashfs_args"] = None

    cfg_dict["lorax_templates"] = cfg.share_dir
    cfg_dict["tmp"] = "/var/tmp/"
    cfg_dict["dracut_args"] = None      # Use default args for dracut

    # Compose things in a temporary directory inside the results directory
    cfg_dict["result_dir"] = joinpaths(results_dir, "compose")
    os.makedirs(cfg_dict["result_dir"])

    install_cfg = DataHolder(**cfg_dict)

    # Some kludges for the 99-copy-logs %post, failure in it will crash the build
    for flag_file in ["/tmp/NOSAVE_INPUT_KS", "/tmp/NOSAVE_LOGS"]:
        # Touch the flag file and close the handle instead of leaking it
        open(flag_file, "w").close()

    # Placing a CANCEL file in the results directory will make execWithRedirect send anaconda a SIGTERM
    def cancel_build():
        return os.path.exists(joinpaths(results_dir, "CANCEL"))

    log.debug("cfg  = %s", install_cfg)
    try:
        test_path = joinpaths(results_dir, "TEST")
        if os.path.exists(test_path):
            # Pretend to run the compose
            time.sleep(10)
            try:
                with open(test_path, "r") as f:
                    test_mode = int(f.read())
            except Exception:
                # Unreadable or non-integer TEST file defaults to failure mode
                test_mode = 1
            if test_mode == 1:
                raise RuntimeError("TESTING FAILED compose")
            else:
                with open(joinpaths(results_dir, install_cfg.image_name), "w") as f:
                    f.write("TEST IMAGE")
        else:
            run_creator(install_cfg, callback_func=cancel_build)

            # Extract the results of the compose into results_dir and cleanup the compose directory
            move_compose_results(install_cfg, results_dir)
    finally:
        # Make sure that everything under the results directory is owned by the user
        user = pwd.getpwuid(cfg.uid).pw_name
        group = grp.getgrgid(cfg.gid).gr_name
        log.debug("Install finished, chowning results to %s:%s", user, group)
        subprocess.call(["chown", "-R", "%s:%s" % (user, group), results_dir])
|
||||
|
||||
def get_compose_type(results_dir):
    """Return the type of composition.

    :param results_dir: The directory containing the metadata and results for the build
    :type results_dir: str
    :returns: The type of compose (eg. 'tar')
    :rtype: str
    :raises: RuntimeError if no kickstart template can be found.
    """
    # Should only be 2 kickstarts, the final-kickstart.ks and the template
    candidates = []
    for ks in glob(joinpaths(results_dir, "*.ks")):
        if "final-kickstart" in ks:
            continue
        # Strip the ".ks" suffix; the basename is the compose type
        candidates.append(os.path.basename(ks)[:-3])
    if len(candidates) != 1:
        raise RuntimeError("Cannot find ks template for build %s" % os.path.basename(results_dir))
    return candidates[0]
|
||||
|
||||
def compose_detail(results_dir):
    """Return details about the build.

    :param results_dir: The directory containing the metadata and results for the build
    :type results_dir: str
    :returns: A dictionary with details about the compose
    :rtype: dict
    :raises: IOError if it cannot read the directory, STATUS, or blueprint file.

    The following details are included in the dict:

    * id - The uuid of the composition
    * queue_status - The final status of the composition (FINISHED or FAILED)
    * timestamp - The time of the last status change
    * compose_type - The type of output generated (tar, iso, etc.)
    * blueprint - Blueprint name
    * version - Blueprint version
    * image_size - Size of the image, if finished. 0 otherwise.
    """
    build_id = os.path.basename(os.path.abspath(results_dir))
    # Close the STATUS file promptly instead of relying on garbage collection
    with open(joinpaths(results_dir, "STATUS")) as f:
        status = f.read().strip()
    # The STATUS mtime is the time of the last state change
    mtime = os.stat(joinpaths(results_dir, "STATUS")).st_mtime
    blueprint = recipe_from_file(joinpaths(results_dir, "blueprint.toml"))

    compose_type = get_compose_type(results_dir)

    # Only report a size once the compose has finished and the image exists
    image_path = get_image_name(results_dir)[1]
    if status == "FINISHED" and os.path.exists(image_path):
        image_size = os.stat(image_path).st_size
    else:
        image_size = 0

    return {"id": build_id,
            "queue_status": status,
            "timestamp": mtime,
            "compose_type": compose_type,
            "blueprint": blueprint["name"],
            "version": blueprint["version"],
            "image_size": image_size
           }
|
||||
|
||||
def queue_status(cfg):
    """Return details about what is in the queue.

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :returns: A list of the new composes, and a list of the running composes
    :rtype: dict

    This returns a dict with 2 lists. "new" is the list of uuids that are waiting to be built,
    and "run" has the uuids that are being built (currently limited to 1 at a time).
    """
    queue_dir = joinpaths(cfg.get("composer", "lib_dir"), "queue")

    def _queue_details(state):
        """Return compose details for every readable entry in one queue state"""
        details = []
        for link in glob(joinpaths(queue_dir, state + "/*")):
            try:
                details.append(compose_detail(os.path.realpath(link)))
            except IOError:
                # Skip entries that vanished or are missing their metadata
                continue
        return details

    return {
        "new": _queue_details("new"),
        "run": _queue_details("run")
    }
|
||||
|
||||
def uuid_status(cfg, uuid):
    """Return the details of a specific UUID compose

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :returns: Details about the build, or None if they cannot be read
    :rtype: dict or None

    Returns the same dict as `compose_details()`
    """
    try:
        return compose_detail(joinpaths(cfg.get("composer", "lib_dir"), "results", uuid))
    except IOError:
        # Missing or unreadable results directory
        return None
|
||||
|
||||
def build_status(cfg, status_filter=None):
    """Return the details of finished or failed builds

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param status_filter: What builds to return. None == all, "FINISHED", or "FAILED"
    :type status_filter: str
    :returns: A list of the build details (from compose_details)
    :rtype: list of dicts

    This returns a list of build details for each of the matching builds on the
    system. It does not return the status of builds that have not been finished.
    Use queue_status() for those.
    """
    if status_filter:
        status_filter = [status_filter]
    else:
        status_filter = ["FINISHED", "FAILED"]

    results = []
    result_dir = joinpaths(cfg.get("composer", "lib_dir"), "results")
    for build in glob(result_dir + "/*"):
        log.debug("Checking status of build %s", build)

        try:
            # Close the STATUS file promptly instead of relying on garbage collection
            with open(joinpaths(build, "STATUS"), "r") as f:
                status = f.read().strip()
            if status in status_filter:
                results.append(compose_detail(build))
        except IOError:
            # Builds missing STATUS or metadata are skipped
            pass
    return results
|
||||
|
||||
def uuid_cancel(cfg, uuid):
    """Cancel a build and delete its results

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :returns: True if it was canceled and deleted
    :rtype: bool

    Only call this if the build status is WAITING or RUNNING
    """
    # This status can change (and probably will) while it is in the middle of doing this:
    # It can move from WAITING -> RUNNING or it can move from RUNNING -> FINISHED|FAILED

    # If it is in WAITING remove the symlink and then check to make sure it didn't show up
    # in RUNNING
    queue_dir = joinpaths(cfg.get("composer", "lib_dir"), "queue")
    uuid_new = joinpaths(queue_dir, "new", uuid)
    if os.path.exists(uuid_new):
        try:
            os.unlink(uuid_new)
        except OSError:
            # The symlink may vanish if the queue monitor started the build
            pass
        uuid_run = joinpaths(queue_dir, "run", uuid)
        if not os.path.exists(uuid_run):
            # Successfully removed it before the build started
            return uuid_delete(cfg, uuid)

    # Tell the build to stop running
    cancel_path = joinpaths(cfg.get("composer", "lib_dir"), "results", uuid, "CANCEL")
    # Close the file promptly so the flag is visible to the build right away
    with open(cancel_path, "w") as f:
        f.write("\n")

    # Wait for status to move to FAILED
    started = time.time()
    while True:
        status = uuid_status(cfg, uuid)
        if status is None or status["queue_status"] == "FAILED":
            break

        # Is this taking too long? Exit anyway and try to cleanup.
        if time.time() > started + (10 * 60):
            log.error("Failed to cancel the build of %s", uuid)
            break

        time.sleep(5)

    # Remove the partial results. Return the result so the documented bool
    # contract holds (previously fell off the end and returned None).
    return uuid_delete(cfg, uuid)
|
||||
|
||||
def uuid_delete(cfg, uuid):
    """Delete all of the results from a compose

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :returns: True if it was deleted
    :rtype: bool
    :raises: This will raise an error if the delete failed
    """
    target = joinpaths(cfg.get("composer", "lib_dir"), "results", uuid)
    # Sanity check the path before recursively removing it (eg. guards against
    # an empty uuid resolving close to the results root)
    if not target or len(target) < 10:
        raise RuntimeError("Directory length is too short: %s" % target)
    shutil.rmtree(target)
    return True
|
||||
|
||||
def uuid_info(cfg, uuid):
    """Return information about the composition

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :returns: dictionary of information about the composition
    :rtype: dict
    :raises: RuntimeError if there was a problem

    This will return a dict with the following fields populated:

    * id - The uuid of the composition
    * config - containing the configuration settings used to run Anaconda
    * blueprint - The depsolved blueprint used to generate the kickstart
    * commit - The (local) git commit hash for the blueprint used
    * deps - The NEVRA of all of the dependencies used in the composition
    * compose_type - The type of output generated (tar, iso, etc.)
    * queue_status - The final status of the composition (FINISHED or FAILED)
    """
    uuid_dir = joinpaths(cfg.get("composer", "lib_dir"), "results", uuid)
    if not os.path.exists(uuid_dir):
        raise RuntimeError("%s is not a valid build_id" % uuid)

    def _load_toml(fname):
        """Load a required toml file from the results dir, or raise RuntimeError"""
        path = joinpaths(uuid_dir, fname)
        if not os.path.exists(path):
            raise RuntimeError("Missing %s for %s" % (fname, uuid))
        # Close the file promptly instead of relying on garbage collection
        with open(path, "r") as f:
            return toml.loads(f.read())

    # Load the compose configuration and metadata
    cfg_dict = _load_toml("config.toml")
    frozen_dict = _load_toml("frozen.toml")
    deps_dict = _load_toml("deps.toml")

    details = compose_detail(uuid_dir)

    commit_path = joinpaths(uuid_dir, "COMMIT")
    if not os.path.exists(commit_path):
        raise RuntimeError("Missing commit hash for %s" % uuid)
    with open(commit_path, "r") as f:
        commit_id = f.read().strip()

    return {"id": uuid,
            "config": cfg_dict,
            "blueprint": frozen_dict,
            "commit": commit_id,
            "deps": deps_dict,
            "compose_type": details["compose_type"],
            "queue_status": details["queue_status"],
            "image_size": details["image_size"]
           }
|
||||
|
||||
def uuid_tar(cfg, uuid, metadata=False, image=False, logs=False):
    """Return a tar of the build data

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :param metadata: Set to true to include all the metadata needed to reproduce the build
    :type metadata: bool
    :param image: Set to true to include the output image
    :type image: bool
    :param logs: Set to true to include the logs from the build
    :type logs: bool
    :returns: A stream of bytes from tar
    :rtype: A generator
    :raises: RuntimeError if there was a problem (eg. missing config file)

    This yields an uncompressed tar's data to the caller. It includes
    the selected data to the caller by returning the Popen stdout from the tar process.
    """
    uuid_dir = joinpaths(cfg.get("composer", "lib_dir"), "results", uuid)
    if not os.path.exists(uuid_dir):
        raise RuntimeError("%s is not a valid build_id" % uuid)

    # Load the compose configuration
    cfg_path = joinpaths(uuid_dir, "config.toml")
    if not os.path.exists(cfg_path):
        raise RuntimeError("Missing config.toml for %s" % uuid)
    # Close the file promptly instead of relying on garbage collection
    with open(cfg_path, "r") as f:
        cfg_dict = toml.loads(f.read())
    image_name = cfg_dict["image_name"]

    def include_file(f):
        """Decide whether a results file belongs in the tar based on the flags"""
        if f.endswith("/logs"):
            return logs
        if f.endswith(image_name):
            return image
        return metadata
    filenames = [os.path.basename(f) for f in glob(joinpaths(uuid_dir, "*")) if include_file(f)]

    # Stream the tar from a subprocess; the caller reads from its stdout pipe
    tar = Popen(["tar", "-C", uuid_dir, "-cf-"] + filenames, stdout=PIPE)
    return tar.stdout
|
||||
|
||||
def uuid_image(cfg, uuid):
    """Return the filename and full path of the build's image file

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :returns: The image filename and full path
    :rtype: tuple of strings
    :raises: RuntimeError if there was a problem (eg. invalid uuid, missing config file)
    """
    # Map the uuid to its results directory; get_image_name() does the rest
    return get_image_name(joinpaths(cfg.get("composer", "lib_dir"), "results", uuid))
|
||||
|
||||
def get_image_name(uuid_dir):
    """Return the filename and full path of the build's image file

    :param uuid_dir: The directory containing the results of the build
    :type uuid_dir: str
    :returns: The image filename and full path
    :rtype: tuple of strings
    :raises: RuntimeError if there was a problem (eg. invalid uuid, missing config file)
    """
    uuid = os.path.basename(os.path.abspath(uuid_dir))
    if not os.path.exists(uuid_dir):
        raise RuntimeError("%s is not a valid build_id" % uuid)

    # Load the compose configuration
    cfg_path = joinpaths(uuid_dir, "config.toml")
    if not os.path.exists(cfg_path):
        raise RuntimeError("Missing config.toml for %s" % uuid)
    # Close the file promptly instead of relying on garbage collection
    with open(cfg_path, "r") as f:
        cfg_dict = toml.loads(f.read())
    image_name = cfg_dict["image_name"]

    return (image_name, joinpaths(uuid_dir, image_name))
|
||||
|
||||
def uuid_log(cfg, uuid, size=1024):
    """Return `size` kbytes from the end of the anaconda.log

    :param cfg: Configuration settings
    :type cfg: ComposerConfig
    :param uuid: The UUID of the build
    :type uuid: str
    :param size: Number of kbytes to read. Default is 1024
    :type size: int
    :returns: Up to `size` kbytes from the end of the log
    :rtype: str
    :raises: RuntimeError if there was a problem (eg. no log file available)

    This function tries to return lines from the end of the log, it will
    attempt to start on a line boundry, and may return less than `size` kbytes.
    """
    uuid_dir = joinpaths(cfg.get("composer", "lib_dir"), "results", uuid)
    if not os.path.exists(uuid_dir):
        raise RuntimeError("%s is not a valid build_id" % uuid)

    # While a build is running the logs will be in /tmp/anaconda.log and when it
    # has finished they will be in the results directory
    status = uuid_status(cfg, uuid)
    if status is None:
        raise RuntimeError("Status is missing for %s" % uuid)

    if status["queue_status"] == "RUNNING":
        log_path = "/tmp/anaconda.log"
    else:
        log_path = joinpaths(uuid_dir, "logs", "anaconda", "anaconda.log")
    if not os.path.exists(log_path):
        raise RuntimeError("No anaconda.log available.")

    with open(log_path, "r") as f:
        # Seek to the end to find the total size of the log
        f.seek(0, 2)
        end = f.tell()
        if end < 1024 * size:
            # Log is smaller than the requested window, rewind to the start
            f.seek(0, 0)
        else:
            # Seek back `size` kbytes from the end
            f.seek(end - (1024 * size))
        # Find the start of the next line and return the rest
        # NOTE(review): this readline() also runs for small logs that were
        # rewound to offset 0, which drops their first line — confirm whether
        # it was meant to apply only to the mid-file seek branch.
        f.readline()
        return f.read()
|
882
src/pylorax/api/recipes.py
Normal file
882
src/pylorax/api/recipes.py
Normal file
@ -0,0 +1,882 @@
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
import gi
|
||||
gi.require_version("Ggit", "1.0")
|
||||
from gi.repository import Ggit as Git
|
||||
from gi.repository import Gio
|
||||
from gi.repository import GLib
|
||||
|
||||
import os
|
||||
import pytoml as toml
|
||||
import semantic_version as semver
|
||||
|
||||
from pylorax.api.projects import dep_evra
|
||||
from pylorax.base import DataHolder
|
||||
from pylorax.sysutils import joinpaths
|
||||
|
||||
|
||||
class CommitTimeValError(Exception):
    """Raised for errors with a commit time value"""
    pass

class RecipeFileError(Exception):
    """Raised for errors with a recipe file"""
    pass

class RecipeError(Exception):
    """Raised when there is a problem with a recipe's contents (eg. parsing)"""
    pass
|
||||
|
||||
|
||||
class Recipe(dict):
    """A Recipe of package and modules

    This is a subclass of dict that enforces the constructor arguments
    and adds a .filename property to return the recipe's filename,
    and a .toml() function to return the recipe as a TOML string.
    """
    def __init__(self, name, description, version, modules, packages):
        """Create a Recipe, validating the version and sorting the lists

        :param name: Recipe name
        :param description: Recipe description
        :param version: A semver-compatible version string, or a false-y value
        :param modules: List of module dicts with "name" keys, or None
        :param packages: List of package dicts with "name" keys, or None
        :raises: ValueError if version is set but not semver compatible
        """
        # Check that version is empty or semver compatible
        if version:
            semver.Version(version)

        # Make sure modules and packages are listed by their case-insensitive names
        if modules is not None:
            modules = sorted(modules, key=lambda m: m["name"].lower())
        if packages is not None:
            packages = sorted(packages, key=lambda p: p["name"].lower())
        dict.__init__(self, name=name,
                            description=description,
                            version=version,
                            modules=modules,
                            packages=packages)

    @property
    def package_names(self):
        """Return the names of the packages"""
        return [p["name"] for p in self["packages"] or []]

    @property
    def module_names(self):
        """Return the names of the modules"""
        return [m["name"] for m in self["modules"] or []]

    @property
    def filename(self):
        """Return the Recipe's filename

        Replaces spaces in the name with '-' and appends .toml
        """
        return recipe_filename(self.get("name"))

    def toml(self):
        """Return the Recipe in TOML format"""
        # NOTE(review): .encode() makes this return bytes on Python 3, not a
        # str as the class docstring says. Callers may depend on bytes (eg.
        # when writing the recipe into a git blob) — confirm before changing.
        return toml.dumps(self).encode("UTF-8")

    def bump_version(self, old_version=None):
        """semver recipe version number bump

        :param old_version: An optional old version number
        :type old_version: str
        :returns: The new version number or None
        :rtype: str
        :raises: ValueError

        If neither have a version, 0.0.1 is returned
        If there is no old version the new version is checked and returned
        If there is no new version, but there is a old one, bump its patch level
        If the old and new versions are the same, bump the patch level
        If they are different, check and return the new version
        """
        new_version = self.get("version")
        if not new_version and not old_version:
            # Neither side has a version, start from 0.0.1
            self["version"] = "0.0.1"

        elif new_version and not old_version:
            # Validate the new version (raises ValueError if not semver)
            semver.Version(new_version)
            self["version"] = new_version

        elif not new_version or new_version == old_version:
            # No new version, or unchanged: bump the old version's patch level
            new_version = str(semver.Version(old_version).next_patch())
            self["version"] = new_version

        else:
            # Versions differ: validate and keep the new one
            semver.Version(new_version)
            self["version"] = new_version

        # Return the new version
        return str(semver.Version(self["version"]))

    def freeze(self, deps):
        """ Return a new Recipe with full module and package NEVRA

        :param deps: A list of dependency NEVRA to use to fill in the modules and packages
        :type deps: list of dicts
        :returns: A new Recipe object
        :rtype: Recipe
        """
        module_names = self.module_names
        package_names = self.package_names

        new_modules = []
        new_packages = []
        # Deps that match neither the package nor module list are dropped
        for dep in deps:
            if dep["name"] in package_names:
                new_packages.append(RecipePackage(dep["name"], dep_evra(dep)))
            elif dep["name"] in module_names:
                new_modules.append(RecipeModule(dep["name"], dep_evra(dep)))

        return Recipe(self["name"], self["description"], self["version"],
                      new_modules, new_packages)
|
||||
|
||||
class RecipeModule(dict):
    """A dict holding a recipe module's 'name' and 'version' entries."""
    def __init__(self, name, version):
        super().__init__(name=name, version=version)
class RecipePackage(RecipeModule):
    """A recipe package entry; identical in shape to RecipeModule."""
    pass
def recipe_from_file(recipe_path):
    """Return a recipe file as a Recipe object

    :param recipe_path: Path to the recipe file
    :type recipe_path: str
    :returns: A Recipe object
    :rtype: Recipe
    """
    with open(recipe_path, 'rb') as recipe_file:
        return recipe_from_toml(recipe_file.read())
def recipe_from_toml(recipe_str):
    """Create a Recipe object from a TOML string.

    :param recipe_str: The Recipe TOML string
    :type recipe_str: str
    :returns: A Recipe object
    :rtype: Recipe
    :raises: TomlError
    """
    return recipe_from_dict(toml.loads(recipe_str))
def recipe_from_dict(recipe_dict):
    """Create a Recipe object from a plain dict.

    :param recipe_dict: A plain dict of the recipe
    :type recipe_dict: dict
    :returns: A Recipe object
    :rtype: Recipe
    :raises: RecipeError
    """
    try:
        # "modules" and "packages" may be absent or empty; default to []
        modules = [RecipeModule(m.get("name"), m.get("version"))
                   for m in recipe_dict.get("modules") or []]
        packages = [RecipePackage(p.get("name"), p.get("version"))
                    for p in recipe_dict.get("packages") or []]
        name = recipe_dict["name"]
        description = recipe_dict["description"]
        version = recipe_dict.get("version", None)
    except KeyError as e:
        raise RecipeError("There was a problem parsing the recipe: %s" % str(e))

    return Recipe(name, description, version, modules, packages)
def gfile(path):
    """Wrap a string path in a Gio.File for use with the Ggit API."""
    return Gio.file_new_for_path(path)
def recipe_filename(name):
    """Return the toml filename for a recipe

    :param name: The recipe name
    :type name: str
    :returns: The name with spaces replaced by '-' and '.toml' appended
    :rtype: str
    """
    # XXX Raise an error if this is empty?
    return "%s.toml" % name.replace(" ", "-")
def head_commit(repo, branch):
    """Get the branch's HEAD Commit object

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :returns: Branch's head commit
    :rtype: Git.Commit
    :raises: Can raise errors from Ggit
    """
    branch_ref = repo.lookup_branch(branch, Git.BranchType.LOCAL)
    return repo.lookup(branch_ref.get_target(), Git.Commit)
def prepare_commit(repo, branch, builder):
    """Prepare for a commit

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param builder: instance of TreeBuilder with the staged changes
    :type builder: TreeBuilder
    :returns: (Tree, Sig, Ref)
    :rtype: tuple
    :raises: Can raise errors from Ggit
    """
    new_tree = repo.lookup(builder.write(), Git.Tree)
    signature = Git.Signature.new_now("bdcs-api-server", "user-email")
    return (new_tree, signature, "refs/heads/%s" % branch)
def open_or_create_repo(path):
    """Open an existing repo, or create a new one

    :param path: path to recipe directory
    :type path: string
    :returns: A repository object
    :rtype: Git.Repository
    :raises: Can raise errors from Ggit

    A bare git repo will be created in the git directory of the specified path.
    If a repo already exists it will be opened and returned instead of
    creating a new one.
    """
    Git.init()
    git_path = joinpaths(path, "git")
    # A bare repo that was already initialized will have a HEAD file
    if os.path.exists(joinpaths(git_path, "HEAD")):
        return Git.Repository.open(gfile(git_path))

    repo = Git.Repository.init_repository(gfile(git_path), True)

    # Seed the new repository with an initial empty commit
    sig = Git.Signature.new_now("bdcs-api-server", "user-email")
    empty_tree = repo.lookup(repo.get_index().write_tree(), Git.Tree)
    repo.create_commit("HEAD", sig, sig, "UTF-8", "Initial Recipe repository commit", empty_tree, [])
    return repo
def write_commit(repo, branch, filename, message, content):
    """Make a new commit to a repository's branch

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: full path of the file to add
    :type filename: str
    :param message: The commit message
    :type message: str
    :param content: The data to write
    :type content: str
    :returns: OId of the new commit
    :rtype: Git.OId
    :raises: Can raise errors from Ggit
    """
    try:
        parent_commit = head_commit(repo, branch)
    except GLib.GError:
        # Branch doesn't exist, make a new one based on master
        master_head = head_commit(repo, "master")
        repo.create_branch(branch, master_head, 0)
        parent_commit = head_commit(repo, branch)
    # NOTE: the original looked up head_commit() a second, redundant time
    # after the try/except; that duplicate lookup has been removed.

    blob_id = repo.create_blob_from_buffer(content)

    # Use treebuilder to make a new entry for this filename and blob
    parent_tree = parent_commit.get_tree()
    builder = repo.create_tree_builder_from_tree(parent_tree)
    builder.insert(filename, blob_id, Git.FileMode.BLOB)
    (tree, sig, ref) = prepare_commit(repo, branch, builder)
    return repo.create_commit(ref, sig, sig, "UTF-8", message, tree, [parent_commit])
def read_commit_spec(repo, spec):
    """Return the raw content of the blob specified by the spec

    :param repo: Open repository
    :type repo: Git.Repository
    :param spec: Git revparse spec, e.g. "master:README"
    :type spec: str
    :returns: Contents of the commit
    :rtype: str
    :raises: Can raise errors from Ggit
    """
    blob_id = repo.revparse(spec).get_id()
    return repo.lookup(blob_id, Git.Blob).get_raw_content()
def read_commit(repo, branch, filename, commit=None):
    """Return the contents of a file on a specific branch or commit.

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: filename to read
    :type filename: str
    :param commit: Optional commit hash; when omitted the most recent
                   commit of filename on the branch is used
    :type commit: str
    :returns: The commit id, and the contents of the commit
    :rtype: tuple(str, str)
    :raises: Can raise errors from Ggit
    """
    if not commit:
        # Find the most recent commit for filename on the selected branch
        recent = list_commits(repo, branch, filename, 1)
        if not recent:
            raise RecipeError("No commits for %s on the %s branch." % (filename, branch))
        commit = recent[0].commit

    return (commit, read_commit_spec(repo, "%s:%s" % (commit, filename)))
def read_recipe_commit(repo, branch, recipe_name, commit=None):
    """Read a recipe commit from git and return a Recipe object

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe_name: Recipe name to read
    :type recipe_name: str
    :param commit: Optional commit hash (defaults to the latest commit)
    :type commit: str
    :returns: A Recipe object
    :rtype: Recipe
    :raises: Can raise errors from Ggit
    """
    (_, toml_str) = read_commit(repo, branch, recipe_filename(recipe_name), commit)
    return recipe_from_toml(toml_str)
def read_recipe_and_id(repo, branch, recipe_name, commit=None):
    """Read a recipe commit and its id from git

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe_name: Recipe name to read
    :type recipe_name: str
    :param commit: Optional commit hash (defaults to the latest commit)
    :type commit: str
    :returns: The commit id, and a Recipe object
    :rtype: tuple(str, Recipe)
    :raises: Can raise errors from Ggit
    """
    (commit_id, toml_str) = read_commit(repo, branch, recipe_filename(recipe_name), commit)
    return (commit_id, recipe_from_toml(toml_str))
def list_branch_files(repo, branch):
    """Return a sorted list of the files on the branch HEAD

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :returns: A sorted list of the filenames
    :rtype: list(str)
    :raises: Can raise errors from Ggit
    """
    head_id = head_commit(repo, branch).get_id().to_string()
    return list_commit_files(repo, head_id)
def list_commit_files(repo, commit):
    """Return a sorted list of the files on a commit

    :param repo: Open repository
    :type repo: Git.Repository
    :param commit: The commit hash to list
    :type commit: str
    :returns: A sorted list of the filenames
    :rtype: list(str)
    :raises: Can raise errors from Ggit
    """
    oid = Git.OId.new_from_string(commit)
    tree = repo.lookup(oid, Git.Commit).get_tree()
    return sorted(tree.get(idx).get_name() for idx in range(tree.size()))
def delete_recipe(repo, branch, recipe_name):
    """Delete a recipe from a branch.

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe_name: Recipe name to delete
    :type recipe_name: str
    :returns: OId of the new commit
    :rtype: Git.OId
    :raises: Can raise errors from Ggit
    """
    return delete_file(repo, branch, recipe_filename(recipe_name))
def delete_file(repo, branch, filename):
    """Delete a file from a branch.

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: filename to delete
    :type filename: str
    :returns: OId of the new commit
    :rtype: Git.OId
    :raises: Can raise errors from Ggit
    """
    parent_commit = head_commit(repo, branch)
    # Build a tree identical to the parent's, minus the file
    builder = repo.create_tree_builder_from_tree(parent_commit.get_tree())
    builder.remove(filename)
    (tree, sig, ref) = prepare_commit(repo, branch, builder)
    return repo.create_commit(ref, sig, sig, "UTF-8", "Recipe %s deleted" % filename,
                              tree, [parent_commit])
def revert_recipe(repo, branch, recipe_name, commit):
    """Revert the contents of a recipe to that of a previous commit

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe_name: Recipe name to revert
    :type recipe_name: str
    :param commit: Commit hash
    :type commit: str
    :returns: OId of the new commit
    :rtype: Git.OId
    :raises: Can raise errors from Ggit
    """
    return revert_file(repo, branch, recipe_filename(recipe_name), commit)
def revert_file(repo, branch, filename, commit):
    """Revert the contents of a file to that of a previous commit

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: filename to revert
    :type filename: str
    :param commit: Commit hash
    :type commit: str
    :returns: OId of the new commit
    :rtype: Git.OId
    :raises: Can raise errors from Ggit
    """
    commit_id = Git.OId.new_from_string(commit)
    old_tree = repo.lookup(commit_id, Git.Commit).get_tree()
    # The blob from the old commit is what we want the file restored to
    blob_id = old_tree.get_by_name(filename).get_id()

    parent_commit = head_commit(repo, branch)

    # Use treebuilder to swap the current blob for the old one
    builder = repo.create_tree_builder_from_tree(parent_commit.get_tree())
    builder.insert(filename, blob_id, Git.FileMode.BLOB)
    (tree, sig, ref) = prepare_commit(repo, branch, builder)
    message = "%s reverted to commit %s" % (filename, commit_id.to_string())
    return repo.create_commit(ref, sig, sig, "UTF-8", message, tree, [parent_commit])
def commit_recipe(repo, branch, recipe):
    """Commit a recipe to a branch

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe: Recipe to commit
    :type recipe: Recipe
    :returns: OId of the new commit
    :rtype: Git.OId
    :raises: Can raise errors from Ggit
    """
    # Best-effort lookup of the previously committed version; a missing or
    # unreadable previous recipe just means this is treated as brand new.
    try:
        old_version = read_recipe_commit(repo, branch, recipe["name"])["version"]
    except Exception:
        old_version = None

    recipe.bump_version(old_version)
    message = "Recipe %s, version %s saved." % (recipe["name"], recipe["version"])
    return write_commit(repo, branch, recipe.filename, message, recipe.toml())
def commit_recipe_file(repo, branch, filename):
    """Commit a recipe file to a branch

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: Path to the recipe file to commit
    :type filename: str
    :returns: OId of the new commit
    :rtype: Git.OId
    :raises: Can raise errors from Ggit or RecipeFileError
    """
    try:
        recipe = recipe_from_file(filename)
    except IOError:
        # Unreadable files are reported as a recipe file problem
        raise RecipeFileError

    return commit_recipe(repo, branch, recipe)
def commit_recipe_directory(repo, branch, directory):
    r"""Commit all \*.toml files from a directory, if they aren't already in git.

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param directory: The directory of \*.toml recipes to commit
    :type directory: str
    :returns: None
    :raises: Can raise errors from Ggit or RecipeFileError

    Files with Toml or RecipeFileErrors will be skipped, and the remainder will
    be tried.
    """
    toml_files = {entry for entry in os.listdir(directory) if entry.endswith(".toml")}
    already_committed = set(list_branch_files(repo, branch))

    for fname in toml_files - already_committed:
        # Skip files with errors, but try the others
        try:
            commit_recipe_file(repo, branch, joinpaths(directory, fname))
        except (RecipeFileError, toml.TomlError):
            pass
def tag_recipe_commit(repo, branch, recipe_name):
    """Tag a recipe's most recent commit

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe_name: Recipe name to tag
    :type recipe_name: str
    :returns: Tag id or None if it failed.
    :rtype: Git.OId
    :raises: Can raise errors from Ggit

    Uses tag_file_commit()
    """
    return tag_file_commit(repo, branch, recipe_filename(recipe_name))
def tag_file_commit(repo, branch, filename):
    """Tag a file's most recent commit

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: Filename to tag
    :type filename: str
    :returns: Tag id or None if it failed.
    :rtype: Git.OId
    :raises: Can raise errors from Ggit

    This uses git tags, of the form `refs/tags/<branch>/<filename>/r<revision>`
    Only the most recent recipe commit can be tagged to prevent out of order tagging.
    Revisions start at 1 and increment for each new commit that is tagged.
    If the commit has already been tagged it will return false.
    """
    file_commits = list_commits(repo, branch, filename)
    if not file_commits:
        return None

    # The new revision is one past the most recently tagged commit, or 1
    # when nothing has been tagged yet.
    new_revision = 1
    for details in file_commits:
        if details.revision is not None:
            new_revision = details.revision + 1
            break

    tag_name = "%s/%s/r%d" % (branch, filename, new_revision)
    sig = Git.Signature.new_now("bdcs-api-server", "user-email")
    latest = repo.lookup(Git.OId.new_from_string(file_commits[0].commit), Git.Commit)
    return repo.create_tag(tag_name, latest, sig, tag_name, Git.CreateFlags.NONE)
def find_commit_tag(repo, branch, filename, commit_id):
    """Find the tag that matches the commit_id

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: filename the tag belongs to
    :type filename: str
    :param commit_id: The commit id to check
    :type commit_id: Git.OId
    :returns: The tag or None if there isn't one
    :rtype: str or None

    There should be only 1 tag pointing to a commit, but there may not
    be a tag at all. The tag will look like:
    'refs/tags/<branch>/<filename>/r<revision>'
    """
    pattern = "%s/%s/r*" % (branch, filename)
    matches = [t for t in repo.list_tags_match(pattern) if is_commit_tag(repo, commit_id, t)]
    # Anything other than exactly one match means there is no usable tag
    return matches[0] if len(matches) == 1 else None
def is_commit_tag(repo, commit_id, tag):
    """Check to see if a tag points to a specific commit.

    :param repo: Open repository
    :type repo: Git.Repository
    :param commit_id: The commit id to check
    :type commit_id: Git.OId
    :param tag: The tag to check
    :type tag: str
    :returns: True if the tag points to the commit, False otherwise
    :rtype: bool
    """
    tag_ref = repo.lookup_reference("refs/tags/" + tag)
    tag_obj = repo.lookup(tag_ref.get_target(), Git.Tag)
    return commit_id.compare(tag_obj.get_target_id()) == 0
def get_revision_from_tag(tag):
    """Return the revision number from a tag

    :param tag: The tag to extract the revision from
    :type tag: str
    :returns: The integer revision or None
    :rtype: int or None

    The revision is the part after the r in 'branch/filename/rXXX'
    """
    if tag is None:
        return None
    try:
        revision = tag.rsplit('r', 2)[-1]
        return int(revision)
    except (ValueError, IndexError):
        # Tag doesn't end in a numeric revision
        return None
class CommitDetails(DataHolder):
    """Details about one commit: hash string, ISO8601 timestamp, commit
    message, and optional tag revision number."""
    def __init__(self, commit, timestamp, message, revision=None):
        DataHolder.__init__(self, commit=commit, timestamp=timestamp,
                            message=message, revision=revision)
def list_commits(repo, branch, filename, limit=0):
    """List the commit history of a file on a branch.

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param filename: filename to report the history of
    :type filename: str
    :param limit: Number of commits to return (0=all)
    :type limit: int
    :returns: A list of commit details
    :rtype: list(CommitDetails)
    :raises: Can raise errors from Ggit
    """
    revwalk = Git.RevisionWalker.new(repo)
    revwalk.set_sort_mode(Git.SortMode.TIME)
    revwalk.push_ref("refs/heads/%s" % branch)

    commits = []
    while True:
        commit_id = revwalk.next()
        if not commit_id:
            break
        commit = repo.lookup(commit_id, Git.Commit)

        parents = commit.get_parents()
        # No parents? Must be the first commit.
        if parents.get_size() == 0:
            continue

        tree = commit.get_tree()
        # Is the filename in this tree? If not, move on.
        if not tree.get_by_name(filename):
            continue

        # Is filename different in all of the parent commits?
        parent_commits = list(map(parents.get, range(0, parents.get_size())))
        is_diff = all([is_parent_diff(repo, filename, tree, pc) for pc in parent_commits])
        # No changes from parents, skip it.
        if not is_diff:
            continue

        tag = find_commit_tag(repo, branch, filename, commit.get_id())
        try:
            commits.append(get_commit_details(commit, get_revision_from_tag(tag)))
            # BUGFIX: was `> limit`, which collected limit+1 commits before
            # breaking; `>= limit` honors the documented "number to return".
            if limit and len(commits) >= limit:
                break
        except CommitTimeValError:
            # Skip any commits that have trouble converting the time
            # TODO - log details about this failure
            pass

    # These will be in reverse time sort order thanks to revwalk
    return commits
def get_commit_details(commit, revision=None):
    """Return the details about a specific commit.

    :param commit: The commit to get details from
    :type commit: Git.Commit
    :param revision: Optional commit revision
    :type revision: int
    :returns: Details about the commit
    :rtype: CommitDetails
    :raises: CommitTimeValError or Ggit exceptions
    """
    sig = commit.get_committer()

    # XXX What do we do with timezone?
    _timezone = sig.get_time_zone()
    timeval = GLib.TimeVal()
    if not sig.get_time().to_timeval(timeval):
        raise CommitTimeValError

    return CommitDetails(commit.get_id().to_string(), timeval.to_iso8601(),
                         commit.get_message(), revision)
def is_parent_diff(repo, filename, tree, parent):
    """Check to see if the file differs between a commit and one parent

    :param repo: Open repository
    :type repo: Git.Repository
    :param filename: filename to check
    :type filename: str
    :param tree: The commit's tree
    :type tree: Git.Tree
    :param parent: The commit's parent commit
    :type parent: Git.Commit
    :returns: True if filename in the commit is different from the parent
    :rtype: bool
    """
    opts = Git.DiffOptions.new()
    # Restrict the diff to just this one file
    opts.set_pathspec([filename])
    diff = Git.Diff.new_tree_to_tree(repo, parent.get_tree(), tree, opts)
    return diff.get_num_deltas() > 0
def find_name(name, lst):
    """Find the dict matching the name in a list and return it.

    :param name: Name to search for
    :type name: str
    :param lst: List of dicts with a "name" field
    :returns: First dict with matching name, or None
    :rtype: dict or None
    """
    return next((entry for entry in lst if entry["name"] == name), None)
def diff_items(title, old_items, new_items):
    """Return the differences between two lists of dicts.

    :param title: Title of the entry
    :type title: str
    :param old_items: List of item dicts with "name" field
    :type old_items: list(dict)
    :param new_items: List of item dicts with "name" field
    :type new_items: list(dict)
    :returns: List of diff dicts with old/new entries
    :rtype: list(dict)
    """
    def _by_lower(names):
        # Case-insensitive, stable ordering for display
        return sorted(names, key=lambda n: n.lower())

    old_names = {item["name"] for item in old_items}
    new_names = {item["name"] for item in new_items}

    diffs = []
    # Names only in new_items were added
    for name in _by_lower(new_names - old_names):
        diffs.append({"old": None,
                      "new": {title: find_name(name, new_items)}})

    # Names only in old_items were removed
    for name in _by_lower(old_names - new_names):
        diffs.append({"old": {title: find_name(name, old_items)},
                      "new": None})

    # Names in both may have changed contents
    for name in _by_lower(old_names & new_names):
        old_item = find_name(name, old_items)
        new_item = find_name(name, new_items)
        if old_item != new_item:
            diffs.append({"old": {title: old_item},
                          "new": {title: new_item}})

    return diffs
def recipe_diff(old_recipe, new_recipe):
    """Diff two versions of a recipe

    :param old_recipe: The old version of the recipe
    :type old_recipe: Recipe
    :param new_recipe: The new version of the recipe
    :type new_recipe: Recipe
    :returns: A list of diff dict entries with old/new
    :rtype: list(dict)
    """
    diffs = []
    # Name, description and version cannot be added or removed, only changed
    for field in ["name", "description", "version"]:
        if old_recipe[field] != new_recipe[field]:
            diffs.append({"old": {field.title(): old_recipe[field]},
                          "new": {field.title(): new_recipe[field]}})

    diffs.extend(diff_items("Module", old_recipe["modules"], new_recipe["modules"]))
    diffs.extend(diff_items("Package", old_recipe["packages"], new_recipe["packages"]))

    return diffs
77
src/pylorax/api/server.py
Normal file
77
src/pylorax/api/server.py
Normal file
@ -0,0 +1,77 @@
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import logging
|
||||
log = logging.getLogger("lorax-composer")
|
||||
|
||||
from collections import namedtuple
|
||||
from flask import Flask, jsonify, redirect, send_from_directory
|
||||
from glob import glob
|
||||
import os
|
||||
|
||||
from pylorax import vernum
|
||||
from pylorax.api.crossdomain import crossdomain
|
||||
from pylorax.api.v0 import v0_api
|
||||
from pylorax.sysutils import joinpaths
|
||||
|
||||
GitLock = namedtuple("GitLock", ["repo", "lock", "dir"])
|
||||
YumLock = namedtuple("YumLock", ["yb", "lock"])
|
||||
|
||||
server = Flask(__name__)
|
||||
|
||||
__all__ = ["server", "GitLock"]
|
||||
|
||||
@server.route('/')
def server_root():
    """Redirect requests for the server root to the API documentation.

    BUGFIX: the redirect response must be *returned*; the original called
    redirect() and discarded the result, so the view returned None and
    Flask raised an error instead of redirecting.
    """
    return redirect("/api/docs/")
@server.route("/api/docs/")
|
||||
@server.route("/api/docs/<path:path>")
|
||||
def api_docs(path=None):
|
||||
# Find the html docs
|
||||
try:
|
||||
# This assumes it is running from the source tree
|
||||
docs_path = os.path.abspath(joinpaths(os.path.dirname(__file__), "../../../docs/html"))
|
||||
except IndexError:
|
||||
docs_path = glob("/usr/share/doc/lorax-*/html/")[0]
|
||||
|
||||
if not path:
|
||||
path="index.html"
|
||||
return send_from_directory(docs_path, path)
|
||||
|
||||
@server.route("/api/status")
|
||||
@crossdomain(origin="*")
|
||||
def v0_status():
|
||||
"""
|
||||
`/api/v0/status`
|
||||
^^^^^^^^^^^^^^^^
|
||||
Return the status of the API Server::
|
||||
|
||||
{ "api": "0",
|
||||
"build": "devel",
|
||||
"db_supported": true,
|
||||
"db_version": "0",
|
||||
"schema_version": "0",
|
||||
"backend": "lorax-composer"}
|
||||
"""
|
||||
return jsonify(backend="lorax-composer",
|
||||
build=vernum,
|
||||
api="0",
|
||||
db_version="0",
|
||||
schema_version="0",
|
||||
db_supported=True)
|
||||
|
||||
v0_api(server)
|
1563
src/pylorax/api/v0.py
Normal file
1563
src/pylorax/api/v0.py
Normal file
File diff suppressed because it is too large
Load Diff
99
src/pylorax/api/workspace.py
Normal file
99
src/pylorax/api/workspace.py
Normal file
@ -0,0 +1,99 @@
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import os
|
||||
|
||||
from pylorax.api.recipes import recipe_filename, recipe_from_toml, RecipeFileError
|
||||
from pylorax.sysutils import joinpaths
|
||||
|
||||
|
||||
def workspace_dir(repo, branch):
    """Create the workspace's path from a Repository and branch

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :returns: The path to the branch's workspace directory
    :rtype: str
    """
    return joinpaths(repo.get_location().get_path(), "workspace", branch)
def workspace_read(repo, branch, recipe_name):
    """Read a Recipe from the branch's workspace

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe_name: The name of the recipe
    :type recipe_name: str
    :returns: The workspace copy of the recipe, or None if it doesn't exist
    :rtype: Recipe or None
    :raises: RecipeFileError
    """
    ws_dir = workspace_dir(repo, branch)
    if not os.path.isdir(ws_dir):
        os.makedirs(ws_dir)
    filename = joinpaths(ws_dir, recipe_filename(recipe_name))
    if not os.path.exists(filename):
        return None
    try:
        # BUGFIX: use a context manager so the file handle is always closed
        # (the original opened the file without closing it)
        with open(filename, 'rb') as f:
            recipe = recipe_from_toml(f.read())
    except IOError:
        raise RecipeFileError
    return recipe
def workspace_write(repo, branch, recipe):
    """Write a recipe to the workspace

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe: The recipe to write to the workspace
    :type recipe: Recipe
    :returns: None
    :raises: IO related errors
    """
    ws_dir = workspace_dir(repo, branch)
    if not os.path.isdir(ws_dir):
        os.makedirs(ws_dir)
    filename = joinpaths(ws_dir, recipe.filename)
    # Use a context manager so the file is flushed and closed deterministically
    # instead of relying on garbage collection (the original leaked the handle).
    with open(filename, 'wb') as f:
        f.write(recipe.toml())
|
||||
|
||||
|
||||
def workspace_delete(repo, branch, recipe_name):
    """Delete the recipe from the workspace

    :param repo: Open repository
    :type repo: Git.Repository
    :param branch: Branch name
    :type branch: str
    :param recipe_name: The name of the recipe
    :type recipe_name: str
    :returns: None
    :raises: IO related errors

    Deleting a recipe that is not in the workspace is not an error.
    """
    recipe_path = joinpaths(workspace_dir(repo, branch), recipe_filename(recipe_name))
    if os.path.exists(recipe_path):
        os.unlink(recipe_path)
|
113
src/pylorax/api/yumbase.py
Normal file
113
src/pylorax/api/yumbase.py
Normal file
@ -0,0 +1,113 @@
|
||||
#
|
||||
# Copyright (C) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# pylint: disable=bad-preconf-access
|
||||
|
||||
import logging
|
||||
log = logging.getLogger("lorax-composer")
|
||||
|
||||
import configparser
|
||||
from fnmatch import fnmatchcase
|
||||
from glob import glob
|
||||
import os
|
||||
import yum
|
||||
# This is a hack to short circuit yum's internal logging
|
||||
yum.logginglevels._added_handlers = True
|
||||
|
||||
from pylorax.sysutils import joinpaths
|
||||
|
||||
def get_base_object(conf):
    """Get the Yum object with settings from the config file

    :param conf: configuration object
    :type conf: ComposerParser
    :returns: A Yum base object
    :rtype: YumBase

    Writes a yum configuration file derived from *conf*, then builds a
    YumBase from it with the configured enabled repositories set up and
    their metadata refreshed.
    """
    cachedir = os.path.abspath(conf.get("composer", "cache_dir"))
    yumconf = os.path.abspath(conf.get("composer", "yum_conf"))
    yumroot = os.path.abspath(conf.get("composer", "yum_root"))
    repodir = os.path.abspath(conf.get("composer", "repo_dir"))

    c = configparser.ConfigParser()

    # add the main section
    section = "main"
    data = {"cachedir": cachedir,
            "keepcache": 0,
            "gpgcheck": 0,
            "plugins": 0,
            "assumeyes": 1,
            "reposdir": "",
            "tsflags": "nodocs"}

    if conf.get_default("yum", "proxy", None):
        data["proxy"] = conf.get("yum", "proxy")

    if conf.has_option("yum", "sslverify") and not conf.getboolean("yum", "sslverify"):
        data["sslverify"] = "0"

    c.add_section(section)
    # In Python 3 configparser.set() raises TypeError for non-string values,
    # and `data` contains ints — convert explicitly. A plain loop also
    # replaces the 2to3-generated list(map(lambda ...)) side-effect idiom.
    for key, value in data.items():
        c.set(section, key, str(value))

    # write the yum configuration file
    with open(yumconf, "w") as f:
        c.write(f)

    # create the yum base object
    yb = yum.YumBase()

    yb.preconf.fn = yumconf

    yb.preconf.root = yumroot
    if not os.path.isdir(yb.preconf.root):
        os.makedirs(yb.preconf.root)

    _releasever = conf.get_default("composer", "releasever", None)
    if not _releasever:
        distroverpkg = ['system-release(releasever)', 'redhat-release']
        # Use yum private function to guess the releasever
        _releasever = yum.config._getsysver("/", distroverpkg)
    log.info("releasever = %s", _releasever)
    yb.preconf.releasever = _releasever

    # Turn on as much yum logging as we can
    yb.preconf.debuglevel = 6
    yb.preconf.errorlevel = 6
    yb.logger.setLevel(logging.DEBUG)
    yb.verbose_logger.setLevel(logging.DEBUG)

    # Gather up all the available repo files, add the ones matching "repos":"enabled" patterns
    enabled_repos = conf.get("repos", "enabled").split(",")
    repo_files = glob(joinpaths(repodir, "*.repo"))
    if not conf.has_option("repos", "use_system_repos") or conf.getboolean("repos", "use_system_repos"):
        repo_files.extend(glob("/etc/yum.repos.d/*.repo"))

    for repo_file in repo_files:
        name = os.path.basename(repo_file)[:-5]
        if any([fnmatchcase(name, pattern) for pattern in enabled_repos]):     # pylint: disable=cell-var-from-loop
            yb.getReposFromConfigFile(repo_file)

    # Update the metadata from the enabled repos to speed up later operations
    log.info("Updating yum repository metadata")
    for r in yb.repos.sort():
        r.metadata_expire = 0
        r.mdpolicy = "group:all"
    yb.doRepoSetup()
    yb.repos.doSetup()
    yb.repos.populateSack(mdtype='all', cacheonly=1)

    return yb
|
@ -76,14 +76,14 @@ class CrossdomainTest(unittest.TestCase):
|
||||
def test_02_with_headers_specified(self):
|
||||
response = self.server.get("/02")
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertEqual('Hello, World!', response.data)
|
||||
self.assertEqual(b'Hello, World!', response.data)
|
||||
|
||||
self.assertEqual('TESTING', response.headers['Access-Control-Allow-Headers'])
|
||||
|
||||
def test_03_with_max_age_as_timedelta(self):
|
||||
response = self.server.get("/03")
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertEqual('Hello, World!', response.data)
|
||||
self.assertEqual(b'Hello, World!', response.data)
|
||||
|
||||
expected_max_age = int(timedelta(days=7).total_seconds())
|
||||
actual_max_age = int(response.headers['Access-Control-Max-Age'])
|
||||
@ -92,7 +92,7 @@ class CrossdomainTest(unittest.TestCase):
|
||||
def test_04_attach_to_all_false(self):
|
||||
response = self.server.get("/04")
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertEqual('Hello, World!', response.data)
|
||||
self.assertEqual(b'Hello, World!', response.data)
|
||||
|
||||
# when attach_to_all is False the decorator will not assign
|
||||
# the Access-Control-* headers to the response
|
||||
@ -103,15 +103,17 @@ class CrossdomainTest(unittest.TestCase):
|
||||
def test_05_options_request(self):
|
||||
response = self.server.options("/05")
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertEqual('Hello, World!', response.data)
|
||||
self.assertEqual(b'Hello, World!', response.data)
|
||||
|
||||
self.assertEqual(response.headers['Access-Control-Allow-Methods'], 'HEAD, OPTIONS, GET')
|
||||
# Not always in the same order, so test individually
|
||||
for m in ["HEAD", "OPTIONS", "GET"]:
|
||||
self.assertIn(m, response.headers['Access-Control-Allow-Methods'])
|
||||
|
||||
|
||||
def test_06_with_origin_as_list(self):
|
||||
response = self.server.get("/06")
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertEqual('Hello, World!', response.data)
|
||||
self.assertEqual(b'Hello, World!', response.data)
|
||||
|
||||
for header, value in response.headers:
|
||||
if header == 'Access-Control-Allow-Origin':
|
||||
|
@ -44,7 +44,7 @@ class Yaps(object):
|
||||
version = "version"
|
||||
|
||||
def returnChangelog(self):
|
||||
return [[0,1,"Heavy!"]]
|
||||
return [[0, 1, "Heavy!"]]
|
||||
|
||||
|
||||
class TM(object):
|
||||
@ -74,7 +74,7 @@ class ProjectsTest(unittest.TestCase):
|
||||
self.assertEqual(api_time(499222800), "1985-10-27T01:00:00")
|
||||
|
||||
def test_api_changelog(self):
|
||||
self.assertEqual(api_changelog([[0,1,"Heavy!"], [0, 1, "Light!"]]), "Heavy!")
|
||||
self.assertEqual(api_changelog([[0, 1, "Heavy!"], [0, 1, "Light!"]]), "Heavy!")
|
||||
|
||||
def test_api_changelog_empty_list(self):
|
||||
self.assertEqual(api_changelog([]), '')
|
||||
|
@ -514,8 +514,8 @@ class ServerTestCase(unittest.TestCase):
|
||||
self.assertEqual(200, response.status_code)
|
||||
self.assertTrue(len(response.data) > 1024)
|
||||
# look for some well known strings inside the documentation
|
||||
self.assertRegexpMatches(response.data, r"Lorax [\d.]+ documentation")
|
||||
self.assertRegexpMatches(response.data, r"Copyright \d+, Red Hat, Inc.")
|
||||
self.assertRegex(response.data, r"Lorax [\d.]+ documentation")
|
||||
self.assertRegex(response.data, r"Copyright \d+, Red Hat, Inc.")
|
||||
|
||||
def test_api_docs(self):
|
||||
"""Test the /api/docs/"""
|
||||
|
@ -19,7 +19,7 @@ import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import ConfigParser
|
||||
import configparser
|
||||
|
||||
from pylorax.api.config import configure, make_yum_dirs
|
||||
from pylorax.api.yumbase import get_base_object
|
||||
@ -48,7 +48,7 @@ use_system_repos = False
|
||||
self.yb = get_base_object(config)
|
||||
|
||||
# will read the stored yum config file
|
||||
self.yumconf = ConfigParser.ConfigParser()
|
||||
self.yumconf = configparser.ConfigParser()
|
||||
self.yumconf.read([config.get("composer", "yum_conf")])
|
||||
|
||||
@classmethod
|
||||
|
Loading…
Reference in New Issue
Block a user