diff --git a/.buildinfo b/.buildinfo index c71008ba..e4762278 100644 --- a/.buildinfo +++ b/.buildinfo @@ -1,4 +1,4 @@ # Sphinx build info version 1 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. -config: 24aa8e42090141f3af88e17dd68ad7a5 +config: df84227b4f6d45f96e4ae4c3727d149b tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.doctrees/composer-cli.doctree b/.doctrees/composer-cli.doctree index 214d7c8e..d79a733d 100644 Binary files a/.doctrees/composer-cli.doctree and b/.doctrees/composer-cli.doctree differ diff --git a/.doctrees/composer.cli.doctree b/.doctrees/composer.cli.doctree index df367584..4c8743cc 100644 Binary files a/.doctrees/composer.cli.doctree and b/.doctrees/composer.cli.doctree differ diff --git a/.doctrees/environment.pickle b/.doctrees/environment.pickle index f88b737b..c98d4afe 100644 Binary files a/.doctrees/environment.pickle and b/.doctrees/environment.pickle differ diff --git a/.doctrees/livemedia-creator.doctree b/.doctrees/livemedia-creator.doctree index 9c3746cf..22942723 100644 Binary files a/.doctrees/livemedia-creator.doctree and b/.doctrees/livemedia-creator.doctree differ diff --git a/.doctrees/lorax-composer.doctree b/.doctrees/lorax-composer.doctree index 65ed5221..8e467269 100644 Binary files a/.doctrees/lorax-composer.doctree and b/.doctrees/lorax-composer.doctree differ diff --git a/.doctrees/lorax.doctree b/.doctrees/lorax.doctree index 9a15ba1e..76b9fc56 100644 Binary files a/.doctrees/lorax.doctree and b/.doctrees/lorax.doctree differ diff --git a/.doctrees/pylorax.api.doctree b/.doctrees/pylorax.api.doctree index adcf12e0..dbcc1a48 100644 Binary files a/.doctrees/pylorax.api.doctree and b/.doctrees/pylorax.api.doctree differ diff --git a/.doctrees/pylorax.doctree b/.doctrees/pylorax.doctree index b5d93ea1..264be5a4 100644 Binary files a/.doctrees/pylorax.doctree and b/.doctrees/pylorax.doctree differ diff --git a/_modules/composer/cli.html b/_modules/composer/cli.html index 873c472e..20e4ce8d 100644 --- a/_modules/composer/cli.html +++ b/_modules/composer/cli.html @@ -8,7 +8,7 @@ - composer.cli — Lorax 29.7 documentation + composer.cli — Lorax 29.11 documentation @@ -57,7 +57,7 @@
- 29.7 + 29.11
@@ -179,13 +179,15 @@ from composer.cli.projects import projects_cmd from composer.cli.compose import compose_cmd from composer.cli.sources import sources_cmd +from composer.cli.status import status_cmd command_map = { "blueprints": blueprints_cmd, "modules": modules_cmd, "projects": projects_cmd, "compose": compose_cmd, - "sources": sources_cmd + "sources": sources_cmd, + "status": status_cmd } @@ -195,15 +197,12 @@ :param opts: Cmdline arguments :type opts: argparse.Namespace """ - if len(opts.args) == 0: - log.error("Missing command") - return 1 - elif opts.args[0] not in command_map: + + # Making sure opts.args is not empty (thus, has a command and subcommand) + # is already handled in src/bin/composer-cli. + if opts.args[0] not in command_map: log.error("Unknown command %s", opts.args[0]) return 1 - if len(opts.args) == 1: - log.error("Missing %s sub-command", opts.args[0]) - return 1 else: try: return command_map[opts.args[0]](opts) @@ -244,7 +243,7 @@ + + + + + + +

Source code for composer.cli.status

+#
+# Copyright (C) 2018  Red Hat, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+import logging
+log = logging.getLogger("composer-cli")
+
+from composer import http_client as client
+from composer.cli.help import status_help
+from composer.cli.utilities import handle_api_result
+
+
[docs]def status_cmd(opts): + """Process status commands + + :param opts: Cmdline arguments + :type opts: argparse.Namespace + :returns: Value to return from sys.exit() + :rtype: int + """ + if opts.args[1] == "help" or opts.args[1] == "--help": + print(status_help) + return 0 + elif opts.args[1] != "show": + log.error("Unknown status command: %s", opts.args[1]) + return 1 + + result = client.get_url_json(opts.socket, "/api/status") + (rc, exit_now) = handle_api_result(result, opts.json) + if exit_now: + return rc + + print("API server status:") + print(" Database version: " + result["db_version"]) + print(" Database supported: %s" % result["db_supported"]) + print(" Schema version: " + result["schema_version"]) + print(" API version: " + result["api"]) + print(" Backend: " + result["backend"]) + print(" Build: " + result["build"]) + + if result["msgs"]: + print("Error messages:") + print("\n".join([" " + r for r in result["msgs"]])) + + return rc
+
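As a rough illustration of the new subcommand, this sketch builds the kind of Namespace the dispatcher hands to status_cmd(); the socket path and attribute values are assumptions based on how the function uses opts, not values taken from this diff.

from argparse import Namespace
from composer.cli.status import status_cmd

# Assumed invocation: "composer-cli status show" against the local API socket.
opts = Namespace(args=["status", "show"],
                 socket="/run/weldr/api.socket",   # assumed socket path
                 json=False)                       # set True to print raw JSON
rc = status_cmd(opts)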
\ No newline at end of file diff --git a/_modules/composer/cli/utilities.html b/_modules/composer/cli/utilities.html index 3e2a142e..a3f4bfa9 100644 --- a/_modules/composer/cli/utilities.html +++ b/_modules/composer/cli/utilities.html @@ -8,7 +8,7 @@ - composer.cli.utilities — Lorax 29.7 documentation + composer.cli.utilities — Lorax 29.11 documentation @@ -57,7 +57,7 @@
- 29.7 + 29.11
@@ -215,17 +215,28 @@ :param result: JSON result from the http query :type result: dict + :rtype: tuple + :returns: (rc, should_exit_now) + + Return the correct rc for the program (0 or 1), and whether or + not to continue processing the results. """ if show_json: print(json.dumps(result, indent=4)) - - for err in result.get("errors", []): - log.error(err) - - if result["status"] == True: - return 0 else: - return 1 + for err in result.get("errors", []): + log.error(err["msg"]) + + # What's the rc? If status is present, use that + # If not, use length of errors + if "status" in result: + rc = bool(not result["status"]) + else: + rc = bool(len(result.get("errors", [])) > 0) + + # Caller should return if showing json, or status was present and False + exit_now = show_json or ("status" in result and rc) + return (rc, exit_now)
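To make the new (rc, exit_now) contract concrete, here is a sketch using made-up result dictionaries; the tuples in the comments follow directly from the logic above and are not captured output.

from composer.cli.utilities import handle_api_result

ok        = {"status": True}
failed    = {"status": False, "errors": [{"msg": "unknown blueprint"}]}
info_only = {"build": "devel", "errors": []}   # hypothetical /api/status style payload

print(handle_api_result(ok, False))         # (False, False) - success, keep processing
print(handle_api_result(failed, False))     # (True, True)   - error rc, caller should return
print(handle_api_result(info_only, False))  # (False, False) - no "status" key and no errors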
[docs]def packageNEVRA(pkg): """Return the package info as a NEVRA @@ -273,7 +284,7 @@

Source code for pylorax.api.checkparams

+#
+# Copyright (C) 2018  Red Hat, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+
+import logging
+log = logging.getLogger("lorax-composer")
+
+from flask import jsonify
+from functools import update_wrapper
+
+# A decorator for checking the parameters provided to the API route implementing
+# functions.  The tuples parameter is a list of tuples.  Each tuple is the string
+# name of a parameter ("blueprint_name", not blueprint_name), the value it's set
+# to by flask if the caller did not provide it, and a message to be returned to
+# the user.
+#
+# If the parameter is set to its default, the error message is returned.  Otherwise,
+# the decorated function is called and its return value is returned.
+
[docs]def checkparams(tuples): + def decorator(f): + def wrapped_function(*args, **kwargs): + for tup in tuples: + if kwargs[tup[0]] == tup[1]: + log.error("(%s) %s", f.__name__, tup[2]) + return jsonify(status=False, errors=[tup[2]]), 400 + + return f(*args, **kwargs) + + return update_wrapper(wrapped_function, f) + + return decorator
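A minimal sketch of the decorator in use, modelled on the routes in pylorax.api.v0 later in this diff; the Flask app and handler below are illustrative only.

from flask import Flask, jsonify
from pylorax.api.checkparams import checkparams

app = Flask(__name__)

@app.route("/api/v0/blueprints/info", defaults={'blueprint_names': ""})
@app.route("/api/v0/blueprints/info/<blueprint_names>")
@checkparams([("blueprint_names", "", "no blueprint names given")])
def blueprints_info(blueprint_names):
    # A request without any names hits the "" default and gets a 400 from the
    # decorator before this body runs.
    return jsonify(status=True, blueprints=blueprint_names.split(","))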
+
\ No newline at end of file diff --git a/_modules/pylorax/api/cmdline.html b/_modules/pylorax/api/cmdline.html index 256c095f..767d4447 100644 --- a/_modules/pylorax/api/cmdline.html +++ b/_modules/pylorax/api/cmdline.html @@ -8,7 +8,7 @@ - pylorax.api.cmdline — Lorax 29.7 documentation + pylorax.api.cmdline — Lorax 29.11 documentation @@ -57,7 +57,7 @@
- 29.7 + 29.11
@@ -247,7 +247,7 @@

Source code for pylorax.api.dnfbase

+#
+# Copyright (C) 2017-2018 Red Hat, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+# pylint: disable=bad-preconf-access
+
+import logging
+log = logging.getLogger("lorax-composer")
+
+import dnf
+import dnf.logging
+from glob import glob
+import os
+import shutil
+
+
+
[docs]def get_base_object(conf): + """Get the DNF object with settings from the config file + + :param conf: configuration object + :type conf: ComposerParser + :returns: A DNF Base object + :rtype: dnf.Base + """ + cachedir = os.path.abspath(conf.get("composer", "cache_dir")) + dnfconf = os.path.abspath(conf.get("composer", "dnf_conf")) + dnfroot = os.path.abspath(conf.get("composer", "dnf_root")) + repodir = os.path.abspath(conf.get("composer", "repo_dir")) + + # Setup the config for the DNF Base object + dbo = dnf.Base() + dbc = dbo.conf +# TODO - Handle this +# dbc.logdir = logdir + dbc.installroot = dnfroot + if not os.path.isdir(dnfroot): + os.makedirs(dnfroot) + if not os.path.isdir(repodir): + os.makedirs(repodir) + + dbc.cachedir = cachedir + dbc.reposdir = [repodir] + dbc.install_weak_deps = False + dbc.prepend_installroot('persistdir') + dbc.tsflags.append('nodocs') + + if conf.get_default("dnf", "proxy", None): + dbc.proxy = conf.get("dnf", "proxy") + + if conf.has_option("dnf", "sslverify") and not conf.getboolean("dnf", "sslverify"): + dbc.sslverify = False + + _releasever = conf.get_default("composer", "releasever", None) + if not _releasever: + # Use the releasever of the host system + _releasever = dnf.rpm.detect_releasever("/") + log.info("releasever = %s", _releasever) + dbc.releasever = _releasever + + # write the dnf configuration file + with open(dnfconf, "w") as f: + f.write(dbc.dump()) + + # dnf needs the repos all in one directory, composer uses repodir for this + # if system repos are supposed to be used, copy them into repodir, overwriting any previous copies + if not conf.has_option("repos", "use_system_repos") or conf.getboolean("repos", "use_system_repos"): + for repo_file in glob("/etc/yum.repos.d/*.repo"): + shutil.copy2(repo_file, repodir) + dbo.read_all_repos() + + # Update the metadata from the enabled repos to speed up later operations + log.info("Updating repository metadata") + try: + dbo.fill_sack(load_system_repo=False) + dbo.read_comps() + except dnf.exceptions.Error as e: + log.error("Failed to update metadata: %s", str(e)) + raise RuntimeError("Fetching metadata failed: %s" % str(e)) + + return dbo
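For reference, an illustrative configuration covering the sections and options get_base_object() reads; the paths and values here are placeholders, not the defaults shipped with lorax-composer.

[composer]
cache_dir = /var/tmp/composer/cache
dnf_conf = /var/tmp/composer/dnf.conf
dnf_root = /var/tmp/composer/dnf/root
repo_dir = /var/tmp/composer/repos.d
# releasever = 29   (optional, falls back to the host's releasever)

[dnf]
# proxy = http://proxy.example.com:3128
sslverify = True

[repos]
use_system_repos = True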
+
\ No newline at end of file diff --git a/_modules/pylorax/api/projects.html b/_modules/pylorax/api/projects.html index 6caa45e2..15c2c726 100644 --- a/_modules/pylorax/api/projects.html +++ b/_modules/pylorax/api/projects.html @@ -8,7 +8,7 @@ - pylorax.api.projects — Lorax 29.7 documentation + pylorax.api.projects — Lorax 29.11 documentation @@ -57,7 +57,7 @@
- 29.7 + 29.11
@@ -339,45 +339,60 @@ pkgs = dbo.sack.query().available() return sorted(map(pkg_to_project_info, pkgs), key=lambda p: p["name"].lower())
-def _depsolve(dbo, projects): +def _depsolve(dbo, projects, groups): """Add projects to a new transaction :param dbo: dnf base object :type dbo: dnf.Base :param projects: The projects and version globs to find the dependencies for :type projects: List of tuples + :param groups: The groups to include in dependency solving + :type groups: List of str :returns: None :rtype: None :raises: ProjectsError if there was a problem installing something """ # This resets the transaction dbo.reset(goal=True) + install_errors = [] + for name in groups: + try: + dbo.group_install(name, ["mandatory", "default"]) + except dnf.exceptions.MarkingError as e: + install_errors.append(("Group %s" % (name), str(e))) + for name, version in projects: try: if not version: version = "*" pkgs = [pkg for pkg in dnf.subject.Subject(name).get_best_query(dbo.sack).filter(version__glob=version, latest=True)] if not pkgs: - raise ProjectsError("No match for %s-%s" % (name, version)) + install_errors.append(("%s-%s" % (name, version), "No match")) + continue for p in pkgs: dbo.package_install(p) - except dnf.exceptions.MarkingError: - raise ProjectsError("No match for %s-%s" % (name, version)) + except dnf.exceptions.MarkingError as e: + install_errors.append(("%s-%s" % (name, version), str(e))) + + if install_errors: + raise ProjectsError("The following package(s) had problems: %s" % ",".join(["%s (%s)" % (pattern, err) for pattern, err in install_errors])) -
[docs]def projects_depsolve(dbo, projects): +
[docs]def projects_depsolve(dbo, projects, groups): """Return the dependencies for a list of projects :param dbo: dnf base object :type dbo: dnf.Base :param projects: The projects to find the dependencies for :type projects: List of Strings + :param groups: The groups to include in dependency solving + :type groups: List of str :returns: NEVRA's of the project and its dependencies :rtype: list of dicts :raises: ProjectsError if there was a problem installing something """ - _depsolve(dbo, projects) + _depsolve(dbo, projects, groups) try: dbo.resolve() @@ -411,18 +426,20 @@ return installed_size
-
[docs]def projects_depsolve_with_size(dbo, projects, with_core=True): +
[docs]def projects_depsolve_with_size(dbo, projects, groups, with_core=True): """Return the dependencies and installed size for a list of projects :param dbo: dnf base object :type dbo: dnf.Base :param project_names: The projects to find the dependencies for :type project_names: List of Strings + :param groups: The groups to include in dependency solving + :type groups: List of str :returns: installed size and a list of NEVRA's of the project and its dependencies :rtype: tuple of (int, list of dicts) :raises: ProjectsError if there was a problem installing something """ - _depsolve(dbo, projects) + _depsolve(dbo, projects, groups) if with_core: dbo.group_install("core", ['mandatory', 'default', 'optional']) @@ -475,7 +492,7 @@ # Add the dependency info to each one for module in modules: - module["dependencies"] = projects_depsolve(dbo, [(module["name"], "*.*")]) + module["dependencies"] = projects_depsolve(dbo, [(module["name"], "*.*")], []) return modules
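A rough usage sketch of the new groups argument, assuming dbo is a dnf.Base prepared by get_base_object() above; the package glob and group name are examples only.

from pylorax.api.projects import projects_depsolve, ProjectsError

try:
    # Depsolve one package glob plus the mandatory/default packages of a group.
    deps = projects_depsolve(dbo, [("httpd", "*")], ["standard"])
except ProjectsError as e:
    print("depsolve failed: %s" % e)
else:
    # Each dependency is expected to be a dict of NEVRA fields.
    for pkg in deps:
        print("%(name)s-%(version)s-%(release)s.%(arch)s" % pkg)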
@@ -721,7 +738,7 @@

Source code for pylorax.api.timestamp

+#
+# Copyright (C) 2018  Red Hat, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+
+import pytoml as toml
+import time
+
+from pylorax.sysutils import joinpaths
+
+TS_CREATED  = "created"
+TS_STARTED  = "started"
+TS_FINISHED = "finished"
+
+
[docs]def write_timestamp(destdir, ty): + path = joinpaths(destdir, "times.toml") + + try: + contents = toml.loads(open(path, "r").read()) + except IOError: + contents = toml.loads("") + + if ty == TS_CREATED: + contents[TS_CREATED] = time.time() + elif ty == TS_STARTED: + contents[TS_STARTED] = time.time() + elif ty == TS_FINISHED: + contents[TS_FINISHED] = time.time() + + with open(path, "w") as f: + f.write(toml.dumps(contents))
+ +
[docs]def timestamp_dict(destdir): + path = joinpaths(destdir, "times.toml") + + try: + return toml.loads(open(path, "r").read()) + except IOError: + return toml.loads("")
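A small sketch of the round trip these helpers provide; the directory below stands in for a compose's results directory and is not a path taken from this diff.

import os
from pylorax.api.timestamp import write_timestamp, timestamp_dict, TS_CREATED, TS_FINISHED

destdir = "/var/tmp/example-compose"     # placeholder results directory
os.makedirs(destdir, exist_ok=True)

write_timestamp(destdir, TS_CREATED)
write_timestamp(destdir, TS_FINISHED)

# e.g. {'created': 1517362647.45, 'finished': 1517363500.12}
print(timestamp_dict(destdir))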
+
\ No newline at end of file diff --git a/_modules/pylorax/api/v0.html b/_modules/pylorax/api/v0.html index e275807f..26f7da78 100644 --- a/_modules/pylorax/api/v0.html +++ b/_modules/pylorax/api/v0.html @@ -8,7 +8,7 @@ - pylorax.api.v0 — Lorax 29.7 documentation + pylorax.api.v0 — Lorax 29.11 documentation @@ -57,7 +57,7 @@
- 29.7 + 29.11
@@ -884,14 +884,14 @@ "id": "45502a6d-06e8-48a5-a215-2b4174b3614b", "blueprint": "glusterfs", "queue_status": "WAITING", - "timestamp": 1517362647.4570868, + "job_created": 1517362647.4570868, "version": "0.0.6" }, { "id": "6d292bd0-bec7-4825-8d7d-41ef9c3e4b73", "blueprint": "kubernetes", "queue_status": "WAITING", - "timestamp": 1517362659.0034983, + "job_created": 1517362659.0034983, "version": "0.0.1" } ], @@ -900,7 +900,8 @@ "id": "745712b2-96db-44c0-8014-fe925c35e795", "blueprint": "glusterfs", "queue_status": "RUNNING", - "timestamp": 1517362633.7965999, + "job_created": 1517362633.7965999, + "job_started": 1517362633.8001345, "version": "0.0.6" } ] @@ -919,14 +920,18 @@ "id": "70b84195-9817-4b8a-af92-45e380f39894", "blueprint": "glusterfs", "queue_status": "FINISHED", - "timestamp": 1517351003.8210032, + "job_created": 1517351003.8210032, + "job_started": 1517351003.8230415, + "job_finished": 1517359234.1003145, "version": "0.0.6" }, { "id": "e695affd-397f-4af9-9022-add2636e7459", "blueprint": "glusterfs", "queue_status": "FINISHED", - "timestamp": 1517362289.7193348, + "job_created": 1517362289.7193348, + "job_started": 1517362289.9751132, + "job_finished": 1517363500.1234567, "version": "0.0.6" } ] @@ -945,7 +950,9 @@ "id": "8c8435ef-d6bd-4c68-9bf1-a2ef832e6b1a", "blueprint": "http-server", "queue_status": "FAILED", - "timestamp": 1517523249.9301329, + "job_created": 1517523249.9301329, + "job_started": 1517523249.9314211, + "job_finished": 1517523255.5623411, "version": "0.0.2" } ] @@ -964,20 +971,24 @@ "id": "8c8435ef-d6bd-4c68-9bf1-a2ef832e6b1a", "blueprint": "http-server", "queue_status": "FINISHED", - "timestamp": 1517523644.2384307, + "job_created": 1517523644.2384307, + "job_started": 1517523644.2551234, + "job_finished": 1517523689.9864314, "version": "0.0.2" }, { "id": "45502a6d-06e8-48a5-a215-2b4174b3614b", "blueprint": "glusterfs", "queue_status": "FINISHED", - "timestamp": 1517363442.188399, + "job_created": 1517363442.188399, + "job_started": 1517363442.325324, + "job_finished": 1517363451.653621, "version": "0.0.6" } ] } -DELETE `/api/v0/blueprints/cancel/<uuid>` +DELETE `/api/v0/compose/cancel/<uuid>` ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Cancel the build, if it is not finished, and delete the results. 
It will return a @@ -1126,16 +1137,19 @@ import pytoml as toml from pylorax.sysutils import joinpaths +from pylorax.api.checkparams import checkparams from pylorax.api.compose import start_build, compose_types from pylorax.api.crossdomain import crossdomain +from pylorax.api.errors import * # pylint: disable=wildcard-import from pylorax.api.projects import projects_list, projects_info, projects_depsolve from pylorax.api.projects import modules_list, modules_info, ProjectsError, repo_to_source from pylorax.api.projects import get_repo_sources, delete_repo_source, source_to_repo, dnf_repo_to_file_repo from pylorax.api.queue import queue_status, build_status, uuid_delete, uuid_status, uuid_info from pylorax.api.queue import uuid_tar, uuid_image, uuid_cancel, uuid_log -from pylorax.api.recipes import list_branch_files, read_recipe_commit, recipe_filename, list_commits +from pylorax.api.recipes import RecipeError, list_branch_files, read_recipe_commit, recipe_filename, list_commits from pylorax.api.recipes import recipe_from_dict, recipe_from_toml, commit_recipe, delete_recipe, revert_recipe from pylorax.api.recipes import tag_recipe_commit, recipe_diff +from pylorax.api.regexes import VALID_API_STRING from pylorax.api.workspace import workspace_read, workspace_write, workspace_delete # The API functions don't actually get called by any code here @@ -1154,6 +1168,15 @@ """ return iterable[offset:][:limit]
+
[docs]def blueprint_exists(api, branch, blueprint_name): + try: + with api.config["GITLOCK"].lock: + read_recipe_commit(api.config["GITLOCK"].repo, branch, blueprint_name) + + return True + except RecipeError: + return False
+
[docs]def v0_api(api): # Note that Sphinx will not generate documentations for any of these. @api.route("/api/v0/blueprints/list") @@ -1161,22 +1184,36 @@ def v0_blueprints_list(): """List the available blueprints on a branch.""" branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: limit = int(request.args.get("limit", "20")) offset = int(request.args.get("offset", "0")) except ValueError as e: - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BAD_LIMIT_OR_OFFSET, "msg": str(e)}]), 400 with api.config["GITLOCK"].lock: blueprints = take_limits([f[:-5] for f in list_branch_files(api.config["GITLOCK"].repo, branch)], offset, limit) return jsonify(blueprints=blueprints, limit=limit, offset=offset, total=len(blueprints)) + @api.route("/api/v0/blueprints/info", defaults={'blueprint_names': ""}) @api.route("/api/v0/blueprints/info/<blueprint_names>") @crossdomain(origin="*") + @checkparams([("blueprint_names", "", "no blueprint names given")]) def v0_blueprints_info(blueprint_names): """Return the contents of the blueprint, or a list of blueprints""" + if VALID_API_STRING.match(blueprint_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + out_fmt = request.args.get("format", "json") + if VALID_API_STRING.match(out_fmt) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in format argument"}]), 400 + blueprints = [] changes = [] errors = [] @@ -1202,7 +1239,7 @@ if not ws_blueprint and not git_blueprint: # Neither blueprint, return an error - errors.append("%s: %s" % (blueprint_name, ", ".join(exceptions))) + errors.append({"id": UNKNOWN_BLUEPRINT, "msg": "%s: %s" % (blueprint_name, ", ".join(exceptions))}) elif ws_blueprint and not git_blueprint: # No git blueprint, return the workspace blueprint changes.append({"name":blueprint_name, "changed":True}) @@ -1219,7 +1256,6 @@ # Sort all the results by case-insensitive blueprint name changes = sorted(changes, key=lambda c: c["name"].lower()) blueprints = sorted(blueprints, key=lambda r: r["name"].lower()) - errors = sorted(errors, key=lambda e: e.lower()) if out_fmt == "toml": # With TOML output we just want to dump the raw blueprint, skipping the rest. 
@@ -1227,32 +1263,44 @@ else: return jsonify(changes=changes, blueprints=blueprints, errors=errors) + @api.route("/api/v0/blueprints/changes", defaults={'blueprint_names': ""}) @api.route("/api/v0/blueprints/changes/<blueprint_names>") @crossdomain(origin="*") + @checkparams([("blueprint_names", "", "no blueprint names given")]) def v0_blueprints_changes(blueprint_names): """Return the changes to a blueprint or list of blueprints""" + if VALID_API_STRING.match(blueprint_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: limit = int(request.args.get("limit", "20")) offset = int(request.args.get("offset", "0")) except ValueError as e: - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BAD_LIMIT_OR_OFFSET, "msg": str(e)}]), 400 blueprints = [] errors = [] for blueprint_name in [n.strip() for n in blueprint_names.split(",")]: filename = recipe_filename(blueprint_name) + + if not blueprint_exists(api, branch, blueprint_name): + errors.append({"id": UNKNOWN_BLUEPRINT, "msg": "Unknown blueprint name: %s" % blueprint_name}) + continue + try: with api.config["GITLOCK"].lock: commits = take_limits(list_commits(api.config["GITLOCK"].repo, branch, filename), offset, limit) except Exception as e: - errors.append("%s: %s" % (blueprint_name, str(e))) + errors.append({"id": BLUEPRINTS_ERROR, "msg": "%s: %s" % (blueprint_name, str(e))}) log.error("(v0_blueprints_changes) %s", str(e)) else: blueprints.append({"name":blueprint_name, "changes":commits, "total":len(commits)}) blueprints = sorted(blueprints, key=lambda r: r["name"].lower()) - errors = sorted(errors, key=lambda e: e.lower()) return jsonify(blueprints=blueprints, errors=errors, offset=offset, limit=limit) @@ -1261,12 +1309,18 @@ def v0_blueprints_new(): """Commit a new blueprint""" branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: if request.headers['Content-Type'] == "text/x-toml": blueprint = recipe_from_toml(request.data) else: blueprint = recipe_from_dict(request.get_json(cache=False)) + if VALID_API_STRING.match(blueprint["name"]) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + with api.config["GITLOCK"].lock: commit_recipe(api.config["GITLOCK"].repo, branch, blueprint) @@ -1275,21 +1329,29 @@ workspace_write(api.config["GITLOCK"].repo, branch, blueprint) except Exception as e: log.error("(v0_blueprints_new) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BLUEPRINTS_ERROR, "msg": str(e)}]), 400 else: return jsonify(status=True) + @api.route("/api/v0/blueprints/delete", defaults={'blueprint_name': ""}, methods=["DELETE"]) @api.route("/api/v0/blueprints/delete/<blueprint_name>", methods=["DELETE"]) @crossdomain(origin="*") + @checkparams([("blueprint_name", "", "no blueprint name given")]) def v0_blueprints_delete(blueprint_name): """Delete a blueprint from git""" + if VALID_API_STRING.match(blueprint_name) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API 
path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: with api.config["GITLOCK"].lock: delete_recipe(api.config["GITLOCK"].repo, branch, blueprint_name) except Exception as e: log.error("(v0_blueprints_delete) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BLUEPRINTS_ERROR, "msg": str(e)}]), 400 else: return jsonify(status=True) @@ -1298,39 +1360,63 @@ def v0_blueprints_workspace(): """Write a blueprint to the workspace""" branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: if request.headers['Content-Type'] == "text/x-toml": blueprint = recipe_from_toml(request.data) else: blueprint = recipe_from_dict(request.get_json(cache=False)) + if VALID_API_STRING.match(blueprint["name"]) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + with api.config["GITLOCK"].lock: workspace_write(api.config["GITLOCK"].repo, branch, blueprint) except Exception as e: log.error("(v0_blueprints_workspace) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BLUEPRINTS_ERROR, "msg": str(e)}]), 400 else: return jsonify(status=True) + @api.route("/api/v0/blueprints/workspace", defaults={'blueprint_name': ""}, methods=["DELETE"]) @api.route("/api/v0/blueprints/workspace/<blueprint_name>", methods=["DELETE"]) @crossdomain(origin="*") + @checkparams([("blueprint_name", "", "no blueprint name given")]) def v0_blueprints_delete_workspace(blueprint_name): """Delete a blueprint from the workspace""" + if VALID_API_STRING.match(blueprint_name) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: with api.config["GITLOCK"].lock: workspace_delete(api.config["GITLOCK"].repo, branch, blueprint_name) except Exception as e: log.error("(v0_blueprints_delete_workspace) %s", str(e)) - return jsonify(status=False, error=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BLUEPRINTS_ERROR, "msg": str(e)}]), 400 else: return jsonify(status=True) + @api.route("/api/v0/blueprints/undo", defaults={'blueprint_name': "", 'commit': ""}, methods=["POST"]) + @api.route("/api/v0/blueprints/undo/<blueprint_name>", defaults={'commit': ""}, methods=["POST"]) @api.route("/api/v0/blueprints/undo/<blueprint_name>/<commit>", methods=["POST"]) @crossdomain(origin="*") + @checkparams([("blueprint_name", "", "no blueprint name given"), + ("commit", "", "no commit ID given")]) def v0_blueprints_undo(blueprint_name, commit): """Undo changes to a blueprint by reverting to a previous commit.""" + if VALID_API_STRING.match(blueprint_name) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch 
argument"}]), 400 + try: with api.config["GITLOCK"].lock: revert_recipe(api.config["GITLOCK"].repo, branch, blueprint_name, commit) @@ -1340,29 +1426,50 @@ workspace_write(api.config["GITLOCK"].repo, branch, blueprint) except Exception as e: log.error("(v0_blueprints_undo) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_COMMIT, "msg": str(e)}]), 400 else: return jsonify(status=True) + @api.route("/api/v0/blueprints/tag", defaults={'blueprint_name': ""}, methods=["POST"]) @api.route("/api/v0/blueprints/tag/<blueprint_name>", methods=["POST"]) @crossdomain(origin="*") + @checkparams([("blueprint_name", "", "no blueprint name given")]) def v0_blueprints_tag(blueprint_name): """Tag a blueprint's latest blueprint commit as a 'revision'""" + if VALID_API_STRING.match(blueprint_name) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: with api.config["GITLOCK"].lock: tag_recipe_commit(api.config["GITLOCK"].repo, branch, blueprint_name) except Exception as e: log.error("(v0_blueprints_tag) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BLUEPRINTS_ERROR, "msg": str(e)}]), 400 else: return jsonify(status=True) + @api.route("/api/v0/blueprints/diff", defaults={'blueprint_name': "", 'from_commit': "", 'to_commit': ""}) + @api.route("/api/v0/blueprints/diff/<blueprint_name>", defaults={'from_commit': "", 'to_commit': ""}) + @api.route("/api/v0/blueprints/diff/<blueprint_name>/<from_commit>", defaults={'to_commit': ""}) @api.route("/api/v0/blueprints/diff/<blueprint_name>/<from_commit>/<to_commit>") @crossdomain(origin="*") + @checkparams([("blueprint_name", "", "no blueprint name given"), + ("from_commit", "", "no from commit ID given"), + ("to_commit", "", "no to commit ID given")]) def v0_blueprints_diff(blueprint_name, from_commit, to_commit): """Return the differences between two commits of a blueprint""" + for s in [blueprint_name, from_commit, to_commit]: + if VALID_API_STRING.match(s) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + try: if from_commit == "NEWEST": with api.config["GITLOCK"].lock: @@ -1372,7 +1479,7 @@ old_blueprint = read_recipe_commit(api.config["GITLOCK"].repo, branch, blueprint_name, from_commit) except Exception as e: log.error("(v0_blueprints_diff) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_COMMIT, "msg": str(e)}]), 400 try: if to_commit == "WORKSPACE": @@ -1390,17 +1497,28 @@ new_blueprint = read_recipe_commit(api.config["GITLOCK"].repo, branch, blueprint_name, to_commit) except Exception as e: log.error("(v0_blueprints_diff) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_COMMIT, "msg": str(e)}]), 400 diff = recipe_diff(old_blueprint, new_blueprint) return jsonify(diff=diff) + @api.route("/api/v0/blueprints/freeze", 
defaults={'blueprint_names': ""}) @api.route("/api/v0/blueprints/freeze/<blueprint_names>") @crossdomain(origin="*") + @checkparams([("blueprint_names", "", "no blueprint names given")]) def v0_blueprints_freeze(blueprint_names): """Return the blueprint with the exact modules and packages selected by depsolve""" + if VALID_API_STRING.match(blueprint_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + out_fmt = request.args.get("format", "json") + if VALID_API_STRING.match(out_fmt) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in format argument"}]), 400 + blueprints = [] errors = [] for blueprint_name in [n.strip() for n in sorted(blueprint_names.split(","), key=lambda n: n.lower())]: @@ -1419,12 +1537,12 @@ with api.config["GITLOCK"].lock: blueprint = read_recipe_commit(api.config["GITLOCK"].repo, branch, blueprint_name) except Exception as e: - errors.append("%s: %s" % (blueprint_name, str(e))) + errors.append({"id": BLUEPRINTS_ERROR, "msg": "%s: %s" % (blueprint_name, str(e))}) log.error("(v0_blueprints_freeze) %s", str(e)) # No blueprint found, skip it. if not blueprint: - errors.append("%s: blueprint_not_found" % (blueprint_name)) + errors.append({"id": UNKNOWN_BLUEPRINT, "msg": "%s: blueprint_not_found" % blueprint_name}) continue # Combine modules and packages and depsolve the list @@ -1435,9 +1553,9 @@ deps = [] try: with api.config["DNFLOCK"].lock: - deps = projects_depsolve(api.config["DNFLOCK"].dbo, projects) + deps = projects_depsolve(api.config["DNFLOCK"].dbo, projects, blueprint.group_names) except ProjectsError as e: - errors.append("%s: %s" % (blueprint_name, str(e))) + errors.append({"id": BLUEPRINTS_ERROR, "msg": "%s: %s" % (blueprint_name, str(e))}) log.error("(v0_blueprints_freeze) %s", str(e)) blueprints.append({"blueprint": blueprint.freeze(deps)}) @@ -1448,11 +1566,19 @@ else: return jsonify(blueprints=blueprints, errors=errors) + @api.route("/api/v0/blueprints/depsolve", defaults={'blueprint_names': ""}) @api.route("/api/v0/blueprints/depsolve/<blueprint_names>") @crossdomain(origin="*") + @checkparams([("blueprint_names", "", "no blueprint names given")]) def v0_blueprints_depsolve(blueprint_names): """Return the dependencies for a blueprint""" + if VALID_API_STRING.match(blueprint_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + branch = request.args.get("branch", "master") + if VALID_API_STRING.match(branch) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in branch argument"}]), 400 + blueprints = [] errors = [] for blueprint_name in [n.strip() for n in sorted(blueprint_names.split(","), key=lambda n: n.lower())]: @@ -1471,12 +1597,12 @@ with api.config["GITLOCK"].lock: blueprint = read_recipe_commit(api.config["GITLOCK"].repo, branch, blueprint_name) except Exception as e: - errors.append("%s: %s" % (blueprint_name, str(e))) + errors.append({"id": BLUEPRINTS_ERROR, "msg": "%s: %s" % (blueprint_name, str(e))}) log.error("(v0_blueprints_depsolve) %s", str(e)) # No blueprint found, skip it. 
if not blueprint: - errors.append("%s: blueprint not found" % blueprint_name) + errors.append({"id": UNKNOWN_BLUEPRINT, "msg": "%s: blueprint not found" % blueprint_name}) continue # Combine modules and packages and depsolve the list @@ -1487,9 +1613,9 @@ deps = [] try: with api.config["DNFLOCK"].lock: - deps = projects_depsolve(api.config["DNFLOCK"].dbo, projects) + deps = projects_depsolve(api.config["DNFLOCK"].dbo, projects, blueprint.group_names) except ProjectsError as e: - errors.append("%s: %s" % (blueprint_name, str(e))) + errors.append({"id": BLUEPRINTS_ERROR, "msg": "%s: %s" % (blueprint_name, str(e))}) log.error("(v0_blueprints_depsolve) %s", str(e)) # Get the NEVRA's of the modules and projects, add as "modules" @@ -1511,41 +1637,61 @@ limit = int(request.args.get("limit", "20")) offset = int(request.args.get("offset", "0")) except ValueError as e: - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BAD_LIMIT_OR_OFFSET, "msg": str(e)}]), 400 try: with api.config["DNFLOCK"].lock: available = projects_list(api.config["DNFLOCK"].dbo) except ProjectsError as e: log.error("(v0_projects_list) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": PROJECTS_ERROR, "msg": str(e)}]), 400 projects = take_limits(available, offset, limit) return jsonify(projects=projects, offset=offset, limit=limit, total=len(available)) + @api.route("/api/v0/projects/info", defaults={'project_names': ""}) @api.route("/api/v0/projects/info/<project_names>") @crossdomain(origin="*") + @checkparams([("project_names", "", "no project names given")]) def v0_projects_info(project_names): """Return detailed information about the listed projects""" + if VALID_API_STRING.match(project_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + try: with api.config["DNFLOCK"].lock: projects = projects_info(api.config["DNFLOCK"].dbo, project_names.split(",")) except ProjectsError as e: log.error("(v0_projects_info) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": PROJECTS_ERROR, "msg": str(e)}]), 400 + + if not projects: + msg = "one of the requested projects does not exist: %s" % project_names + log.error("(v0_projects_info) %s", msg) + return jsonify(status=False, errors=[{"id": UNKNOWN_PROJECT, "msg": msg}]), 400 return jsonify(projects=projects) + @api.route("/api/v0/projects/depsolve", defaults={'project_names': ""}) @api.route("/api/v0/projects/depsolve/<project_names>") @crossdomain(origin="*") + @checkparams([("project_names", "", "no project names given")]) def v0_projects_depsolve(project_names): """Return detailed information about the listed projects""" + if VALID_API_STRING.match(project_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + try: with api.config["DNFLOCK"].lock: - deps = projects_depsolve(api.config["DNFLOCK"].dbo, [(n, "*") for n in project_names.split(",")]) + deps = projects_depsolve(api.config["DNFLOCK"].dbo, [(n, "*") for n in project_names.split(",")], []) except ProjectsError as e: log.error("(v0_projects_depsolve) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": PROJECTS_ERROR, "msg": str(e)}]), 400 + + if not deps: + msg = "one of the requested projects does not exist: %s" % project_names + 
log.error("(v0_projects_depsolve) %s", msg) + return jsonify(status=False, errors=[{"id": UNKNOWN_PROJECT, "msg": msg}]), 400 return jsonify(projects=deps) @@ -1558,11 +1704,18 @@ sources = sorted([r.id for r in repos]) return jsonify(sources=sources) + @api.route("/api/v0/projects/source/info", defaults={'source_names': ""}) @api.route("/api/v0/projects/source/info/<source_names>") @crossdomain(origin="*") + @checkparams([("source_names", "", "no source names given")]) def v0_projects_source_info(source_names): """Return detailed info about the list of sources""" + if VALID_API_STRING.match(source_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + out_fmt = request.args.get("format", "json") + if VALID_API_STRING.match(out_fmt) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in format argument"}]), 400 # Return info on all of the sources if source_names == "*": @@ -1576,7 +1729,7 @@ with api.config["DNFLOCK"].lock: repo = api.config["DNFLOCK"].dbo.repos.get(source, None) if not repo: - errors.append("%s is not a valid source" % source) + errors.append({"id": UNKNOWN_SOURCE, "msg": "%s is not a valid source" % source}) continue sources[repo.id] = repo_to_source(repo, repo.id in system_sources) @@ -1600,7 +1753,7 @@ system_sources = get_repo_sources("/etc/yum.repos.d/*.repo") if source["name"] in system_sources: - return jsonify(status=False, errors=["%s is a system source, it cannot be deleted." % source["name"]]), 400 + return jsonify(status=False, errors=[{"id": SYSTEM_SOURCE, "msg": "%s is a system source, it cannot be changed." % source["name"]}]), 400 try: # Remove it from the RepoDict (NOTE that this isn't explicitly supported by the DNF API) @@ -1645,17 +1798,22 @@ dbo.fill_sack(load_system_repo=False) dbo.read_comps() - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": PROJECTS_ERROR, "msg": str(e)}]), 400 return jsonify(status=True) + @api.route("/api/v0/projects/source/delete", defaults={'source_name': ""}, methods=["DELETE"]) @api.route("/api/v0/projects/source/delete/<source_name>", methods=["DELETE"]) @crossdomain(origin="*") + @checkparams([("source_name", "", "no source name given")]) def v0_projects_source_delete(source_name): """Delete the named source and return a status response""" + if VALID_API_STRING.match(source_name) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + system_sources = get_repo_sources("/etc/yum.repos.d/*.repo") if source_name in system_sources: - return jsonify(status=False, errors=["%s is a system source, it cannot be deleted." % source_name]), 400 + return jsonify(status=False, errors=[{"id": SYSTEM_SOURCE, "msg": "%s is a system source, it cannot be deleted." 
% source_name}]), 400 share_dir = api.config["COMPOSER_CFG"].get("composer", "repo_dir") try: # Remove the file entry for the source @@ -1671,7 +1829,7 @@ except ProjectsError as e: log.error("(v0_projects_source_delete) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_SOURCE, "msg": str(e)}]), 400 return jsonify(status=True) @@ -1680,11 +1838,14 @@ @crossdomain(origin="*") def v0_modules_list(module_names=None): """List available modules, filtering by module_names""" + if module_names and VALID_API_STRING.match(module_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + try: limit = int(request.args.get("limit", "20")) offset = int(request.args.get("offset", "0")) except ValueError as e: - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": BAD_LIMIT_OR_OFFSET, "msg": str(e)}]), 400 if module_names: module_names = module_names.split(",") @@ -1694,21 +1855,35 @@ available = modules_list(api.config["DNFLOCK"].dbo, module_names) except ProjectsError as e: log.error("(v0_modules_list) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": MODULES_ERROR, "msg": str(e)}]), 400 + + if module_names and not available: + msg = "one of the requested modules does not exist: %s" % module_names + log.error("(v0_modules_list) %s", msg) + return jsonify(status=False, errors=[{"id": UNKNOWN_MODULE, "msg": msg}]), 400 modules = take_limits(available, offset, limit) return jsonify(modules=modules, offset=offset, limit=limit, total=len(available)) + @api.route("/api/v0/modules/info", defaults={'module_names': ""}) @api.route("/api/v0/modules/info/<module_names>") @crossdomain(origin="*") + @checkparams([("module_names", "", "no module names given")]) def v0_modules_info(module_names): """Return detailed information about the listed modules""" + if VALID_API_STRING.match(module_names) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 try: with api.config["DNFLOCK"].lock: modules = modules_info(api.config["DNFLOCK"].dbo, module_names.split(",")) except ProjectsError as e: log.error("(v0_modules_info) %s", str(e)) - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": MODULES_ERROR, "msg": str(e)}]), 400 + + if not modules: + msg = "one of the requested modules does not exist: %s" % module_names + log.error("(v0_modules_info) %s", msg) + return jsonify(status=False, errors=[{"id": UNKNOWN_MODULE, "msg": msg}]), 400 return jsonify(modules=modules) @@ -1733,10 +1908,10 @@ errors = [] if not compose: - return jsonify(status=False, errors=["Missing POST body"]), 400 + return jsonify(status=False, errors=[{"id": MISSING_POST, "msg": "Missing POST body"}]), 400 if "blueprint_name" not in compose: - errors.append("No 'blueprint_name' in the JSON request") + errors.append({"id": UNKNOWN_BLUEPRINT,"msg": "No 'blueprint_name' in the JSON request"}) else: blueprint_name = compose["blueprint_name"] @@ -1746,10 +1921,13 @@ branch = compose["branch"] if "compose_type" not in compose: - errors.append("No 'compose_type' in the JSON request") + errors.append({"id": BAD_COMPOSE_TYPE, "msg": "No 'compose_type' in the JSON request"}) else: compose_type = compose["compose_type"] + if VALID_API_STRING.match(blueprint_name) is None: + errors.append({"id": INVALID_CHARS, "msg": 
"Invalid characters in API path"}) + if errors: return jsonify(status=False, errors=errors), 400 @@ -1757,7 +1935,10 @@ build_id = start_build(api.config["COMPOSER_CFG"], api.config["DNFLOCK"], api.config["GITLOCK"], branch, blueprint_name, compose_type, test_mode) except Exception as e: - return jsonify(status=False, errors=[str(e)]), 400 + if "Invalid compose type" in str(e): + return jsonify(status=False, errors=[{"id": BAD_COMPOSE_TYPE, "msg": str(e)}]), 400 + else: + return jsonify(status=False, errors=[{"id": BUILD_FAILED, "msg": str(e)}]), 400 return jsonify(status=True, build_id=build_id) @@ -1789,152 +1970,203 @@ """Return the list of failed composes""" return jsonify(failed=build_status(api.config["COMPOSER_CFG"], "FAILED")) + @api.route("/api/v0/compose/status", defaults={'uuids': ""}) @api.route("/api/v0/compose/status/<uuids>") @crossdomain(origin="*") + @checkparams([("uuids", "", "no UUIDs given")]) def v0_compose_status(uuids): """Return the status of the listed uuids""" + if VALID_API_STRING.match(uuids) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + results = [] + errors = [] for uuid in [n.strip().lower() for n in uuids.split(",")]: details = uuid_status(api.config["COMPOSER_CFG"], uuid) if details is not None: results.append(details) + else: + errors.append({"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}) - return jsonify(uuids=results) + return jsonify(uuids=results, errors=errors) + @api.route("/api/v0/compose/cancel", defaults={'uuid': ""}, methods=["DELETE"]) @api.route("/api/v0/compose/cancel/<uuid>", methods=["DELETE"]) @crossdomain(origin="*") + @checkparams([("uuid", "", "no UUID given")]) def v0_compose_cancel(uuid): """Cancel a running compose and delete its results directory""" + if VALID_API_STRING.match(uuid) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + status = uuid_status(api.config["COMPOSER_CFG"], uuid) if status is None: - return jsonify(status=False, errors=["%s is not a valid build uuid" % uuid]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}]), 400 if status["queue_status"] not in ["WAITING", "RUNNING"]: - return jsonify(status=False, errors=["Build %s is not in WAITING or RUNNING." % uuid]) + return jsonify(status=False, errors=[{"id": BUILD_IN_WRONG_STATE, "msg": "Build %s is not in WAITING or RUNNING." 
% uuid}]) try: uuid_cancel(api.config["COMPOSER_CFG"], uuid) except Exception as e: - return jsonify(status=False, errors=["%s: %s" % (uuid, str(e))]), 400 + return jsonify(status=False, errors=[{"id": COMPOSE_ERROR, "msg": "%s: %s" % (uuid, str(e))}]),400 else: return jsonify(status=True, uuid=uuid) + @api.route("/api/v0/compose/delete", defaults={'uuids': ""}, methods=["DELETE"]) @api.route("/api/v0/compose/delete/<uuids>", methods=["DELETE"]) @crossdomain(origin="*") + @checkparams([("uuids", "", "no UUIDs given")]) def v0_compose_delete(uuids): """Delete the compose results for the listed uuids""" + if VALID_API_STRING.match(uuids) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + results = [] errors = [] for uuid in [n.strip().lower() for n in uuids.split(",")]: status = uuid_status(api.config["COMPOSER_CFG"], uuid) if status is None: - errors.append("%s is not a valid build uuid" % uuid) + errors.append({"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}) elif status["queue_status"] not in ["FINISHED", "FAILED"]: - errors.append("Build %s is not in FINISHED or FAILED." % uuid) + errors.append({"id": BUILD_IN_WRONG_STATE, "msg": "Build %s is not in FINISHED or FAILED." % uuid}) else: try: uuid_delete(api.config["COMPOSER_CFG"], uuid) except Exception as e: - errors.append("%s: %s" % (uuid, str(e))) + errors.append({"id": COMPOSE_ERROR, "msg": "%s: %s" % (uuid, str(e))}) else: results.append({"uuid":uuid, "status":True}) return jsonify(uuids=results, errors=errors) + @api.route("/api/v0/compose/info", defaults={'uuid': ""}) @api.route("/api/v0/compose/info/<uuid>") @crossdomain(origin="*") + @checkparams([("uuid", "", "no UUID given")]) def v0_compose_info(uuid): """Return detailed info about a compose""" + if VALID_API_STRING.match(uuid) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + try: info = uuid_info(api.config["COMPOSER_CFG"], uuid) except Exception as e: - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": COMPOSE_ERROR, "msg": str(e)}]), 400 - return jsonify(**info) + if info is None: + return jsonify(status=False, errors=[{"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}]), 400 + else: + return jsonify(**info) + @api.route("/api/v0/compose/metadata", defaults={'uuid': ""}) @api.route("/api/v0/compose/metadata/<uuid>") @crossdomain(origin="*") + @checkparams([("uuid","", "no UUID given")]) def v0_compose_metadata(uuid): """Return a tar of the metadata for the build""" + if VALID_API_STRING.match(uuid) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + status = uuid_status(api.config["COMPOSER_CFG"], uuid) if status is None: - return jsonify(status=False, errors=["%s is not a valid build uuid" % uuid]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}]), 400 if status["queue_status"] not in ["FINISHED", "FAILED"]: - return jsonify(status=False, errors=["Build %s not in FINISHED or FAILED state." % uuid]), 400 + return jsonify(status=False, errors=[{"id": BUILD_IN_WRONG_STATE, "msg": "Build %s not in FINISHED or FAILED state." 
% uuid}]), 400 else: return Response(uuid_tar(api.config["COMPOSER_CFG"], uuid, metadata=True, image=False, logs=False), mimetype="application/x-tar", headers=[("Content-Disposition", "attachment; filename=%s-metadata.tar;" % uuid)], direct_passthrough=True) + @api.route("/api/v0/compose/results", defaults={'uuid': ""}) @api.route("/api/v0/compose/results/<uuid>") @crossdomain(origin="*") + @checkparams([("uuid","", "no UUID given")]) def v0_compose_results(uuid): """Return a tar of the metadata and the results for the build""" + if VALID_API_STRING.match(uuid) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + status = uuid_status(api.config["COMPOSER_CFG"], uuid) if status is None: - return jsonify(status=False, errors=["%s is not a valid build uuid" % uuid]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}]), 400 elif status["queue_status"] not in ["FINISHED", "FAILED"]: - return jsonify(status=False, errors=["Build %s not in FINISHED or FAILED state." % uuid]), 400 + return jsonify(status=False, errors=[{"id": BUILD_IN_WRONG_STATE, "msg": "Build %s not in FINISHED or FAILED state." % uuid}]), 400 else: return Response(uuid_tar(api.config["COMPOSER_CFG"], uuid, metadata=True, image=True, logs=True), mimetype="application/x-tar", headers=[("Content-Disposition", "attachment; filename=%s.tar;" % uuid)], direct_passthrough=True) + @api.route("/api/v0/compose/logs", defaults={'uuid': ""}) @api.route("/api/v0/compose/logs/<uuid>") @crossdomain(origin="*") + @checkparams([("uuid","", "no UUID given")]) def v0_compose_logs(uuid): """Return a tar of the metadata for the build""" + if VALID_API_STRING.match(uuid) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + status = uuid_status(api.config["COMPOSER_CFG"], uuid) if status is None: - return jsonify(status=False, errors=["%s is not a valid build uuid" % uuid]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}]), 400 elif status["queue_status"] not in ["FINISHED", "FAILED"]: - return jsonify(status=False, errors=["Build %s not in FINISHED or FAILED state." % uuid]), 400 + return jsonify(status=False, errors=[{"id": BUILD_IN_WRONG_STATE, "msg": "Build %s not in FINISHED or FAILED state." % uuid}]), 400 else: return Response(uuid_tar(api.config["COMPOSER_CFG"], uuid, metadata=False, image=False, logs=True), mimetype="application/x-tar", headers=[("Content-Disposition", "attachment; filename=%s-logs.tar;" % uuid)], direct_passthrough=True) + @api.route("/api/v0/compose/image", defaults={'uuid': ""}) @api.route("/api/v0/compose/image/<uuid>") @crossdomain(origin="*") + @checkparams([("uuid","", "no UUID given")]) def v0_compose_image(uuid): """Return the output image for the build""" + if VALID_API_STRING.match(uuid) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + status = uuid_status(api.config["COMPOSER_CFG"], uuid) if status is None: - return jsonify(status=False, errors=["%s is not a valid build uuid" % uuid]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}]), 400 elif status["queue_status"] not in ["FINISHED", "FAILED"]: - return jsonify(status=False, errors=["Build %s not in FINISHED or FAILED state." 
% uuid]), 400 + return jsonify(status=False, errors=[{"id": BUILD_IN_WRONG_STATE, "msg": "Build %s not in FINISHED or FAILED state." % uuid}]), 400 else: image_name, image_path = uuid_image(api.config["COMPOSER_CFG"], uuid) # Make sure it really exists if not os.path.exists(image_path): - return jsonify(status=False, errors=["Build %s is missing image file %s" % (uuid, image_name)]), 400 + return jsonify(status=False, errors=[{"id": BUILD_MISSING_FILE, "msg": "Build %s is missing image file %s" % (uuid, image_name)}]), 400 # Make the image name unique image_name = uuid + "-" + image_name # XXX - Will mime type guessing work for all our output? return send_file(image_path, as_attachment=True, attachment_filename=image_name, add_etags=False) + @api.route("/api/v0/compose/log", defaults={'uuid': ""}) @api.route("/api/v0/compose/log/<uuid>") @crossdomain(origin="*") + @checkparams([("uuid","", "no UUID given")]) def v0_compose_log_tail(uuid): """Return the end of the main anaconda.log, defaults to 1Mbytes""" + if VALID_API_STRING.match(uuid) is None: + return jsonify(status=False, errors=[{"id": INVALID_CHARS, "msg": "Invalid characters in API path"}]), 400 + try: size = int(request.args.get("size", "1024")) except ValueError as e: - return jsonify(status=False, errors=[str(e)]), 400 + return jsonify(status=False, errors=[{"id": COMPOSE_ERROR, "msg": str(e)}]), 400 status = uuid_status(api.config["COMPOSER_CFG"], uuid) if status is None: - return jsonify(status=False, errors=["%s is not a valid build uuid" % uuid]), 400 + return jsonify(status=False, errors=[{"id": UNKNOWN_UUID, "msg": "%s is not a valid build uuid" % uuid}]), 400 elif status["queue_status"] == "WAITING": - return jsonify(status=False, errors=["Build %s has not started yet. No logs to view" % uuid]) + return jsonify(status=False, errors=[{"id": BUILD_IN_WRONG_STATE, "msg": "Build %s has not started yet. No logs to view" % uuid}]) try: return Response(uuid_log(api.config["COMPOSER_CFG"], uuid, size), direct_passthrough=True) except RuntimeError as e: - return jsonify(status=False, errors=[str(e)]), 400
+ return jsonify(status=False, errors=[{"id": COMPOSE_ERROR, "msg": str(e)}]), 400
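To show what the new structured errors look like to a client, this sketch parses a hypothetical 400 response; the id string and UUID are illustrative stand-ins for the constants imported from pylorax.api.errors and a real build id.

import json

body = '''{"status": false,
           "errors": [{"id": "UnknownUUID",
                       "msg": "00000000-0000-0000-0000-000000000000 is not a valid build uuid"}]}'''

response = json.loads(body)
if not response["status"]:
    for err in response["errors"]:
        print("%s: %s" % (err["id"], err["msg"]))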
@@ -1969,7 +2201,7 @@