composer-cli: Remove all traces of composer-cli

weldr-client has replaced composer-cli, so remove all of the composer-cli
code and tests, adjust various things so they no longer expect it to be
available, and rename some things, such as test/composertest.py, to
reflect their exclusive use by lorax.

(cherry picked from commit b75b692607)

Resolves: rhbz#1952978
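
For context: weldr-client ships a compatible composer-cli binary that talks to
osbuild-composer over the same /run/weldr/api.socket, so existing invocations
keep working. A typical replacement workflow looks roughly like this (the
package, unit, and blueprint names below are illustrative, not part of this
commit):

$ sudo dnf install weldr-client osbuild-composer
$ sudo systemctl enable --now osbuild-composer.socket
$ composer-cli blueprints list
$ composer-cli compose start my-blueprint qcow2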
Brian C. Lane 2021-04-23 11:57:15 -07:00
parent 743c16279f
commit d16e2579d5
43 changed files with 27 additions and 5453 deletions

View File

@@ -25,7 +25,6 @@ To run the broader unit and integration tests we use:
$ make test
Some of the tests will be skipped unless a lorax-composer process is running
and listening on an accessible socket. Either run lorax-composer from the
checkout, or use the installed version.
The tests may also be run using a podman container:
$ make test-in-podman
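
As a minimal sketch of the workflow described above, assuming lorax-composer is
installed and pointed at an illustrative blueprints directory (the API socket
must be readable and writable by the user running the tests):

$ sudo lorax-composer /var/lib/lorax/composer/blueprints &
$ make test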

View File

@@ -5,7 +5,6 @@ mandir ?= $(PREFIX)/share/man
DOCKER ?= podman
DOCS_VERSION ?= next
RUN_TESTS ?= ci
BACKEND ?= osbuild-composer
PKGNAME = lorax
VERSION = $(shell awk '/Version:/ { print $$2 }' $(PKGNAME).spec)
@@ -28,13 +27,10 @@ endif
default: all
src/composer/version.py: lorax.spec
echo "num = '$(VERSION)-$(RELEASE)'" > src/composer/version.py
src/pylorax/version.py: lorax.spec
echo "num = '$(VERSION)-$(RELEASE)'" > src/pylorax/version.py
all: src/pylorax/version.py src/composer/version.py
all: src/pylorax/version.py
$(PYTHON) setup.py build
install: all
@@ -42,7 +38,6 @@ install: all
mkdir -p $(DESTDIR)/$(mandir)/man1
install -m 644 docs/man/*.1 $(DESTDIR)/$(mandir)/man1
mkdir -p $(DESTDIR)/etc/bash_completion.d
install -m 644 etc/bash_completion.d/composer-cli $(DESTDIR)/etc/bash_completion.d
check:
@echo "*** Running pylint ***"
@@ -51,8 +46,7 @@ check:
test:
@echo "*** Running tests ***"
PYTHONPATH=$(PYTHONPATH):./src/ $(PYTHON) -X dev -m pytest -v --cov-branch \
--cov=pylorax --cov=composer \
./tests/pylorax/ ./tests/composer/
--cov=pylorax ./tests/pylorax/
coverage3 report -m
[ -f "/usr/bin/coveralls" ] && [ -n "$(COVERALLS_REPO_TOKEN)" ] && coveralls || echo
@@ -75,7 +69,6 @@ clean_cloud_envs:
clean:
-rm -rf build src/pylorax/version.py
-rm -rf build src/composer/version.py
tag:
git tag -f $(TAG)
@@ -142,7 +135,7 @@ $(VM_IMAGE): srpm bots
--upload $(CURDIR)/test/vm.install:/var/tmp/vm.install \
--upload $(realpath tests):/ \
--run-command "chmod +x /var/tmp/vm.install" \
--run-command "cd /var/tmp; BACKEND=$(BACKEND) /var/tmp/vm.install $$srpm" \
--run-command "cd /var/tmp; /var/tmp/vm.install $$srpm" \
$(TEST_OS)
[ -f ~/.config/lorax-test-env ] && bots/image-customize \
--upload ~/.config/lorax-test-env:/var/tmp/lorax-test-env \
@@ -157,16 +150,6 @@ vm: $(VM_IMAGE)
# and update the image. Mostly used when testing downstream snapshots to make
# sure VM_IMAGE is as close as possible to the host!
vm-local-repos: vm
bots/image-customize -v \
--run-command "rm -rf /etc/yum.repos.d" \
$(TEST_OS)
bots/image-customize -v \
--upload $(REPOS_DIR):/etc/yum.repos.d \
--run-command "yum -y remove composer-cli $(BACKEND)" \
--run-command "yum -y update" \
--run-command "yum -y install composer-cli $(BACKEND)" \
--run-command "systemctl enable $(BACKEND).socket" \
$(TEST_OS)
vm-reset:
rm -f $(VM_IMAGE) $(VM_IMAGE).qcow2

View File

@@ -2,6 +2,5 @@ Lorax is a set of tools used to create bootable images.
* lorax - creates the Anaconda boot.iso used to install Fedora
* livemedia-creator - uses Anaconda to create bootable images
* lorax-composer - API server implementing the Weldr BDCS protocol using livemedia-creator
See the [Weldr blog](https://weldr.io) for more info about BDCS and the [Lorax documentation](https://weldr.io/lorax) for more information about Lorax and associated tools.

View File

@@ -1,199 +0,0 @@
# bash completion for composer-cli
__composer_cli_flags="-h --help -j --json -s --socket --log -a --api --test -V"
declare -A __composer_cli_cmds=(
[compose]="list start start-ostree types status log cancel delete info metadata logs results image"
[blueprints]="list show changes diff save delete depsolve push freeze tag undo workspace"
[modules]="list"
[projects]="list info"
[sources]="list info add change delete"
[upload]="list info start log cancel delete reset"
[providers]="list info show push save delete template"
[help]=""
)
__composer_socket_ok() {
[ -w "${COMPOSER_SOCKET:-/run/weldr/api.socket}" ]
}
__composer_blueprints() {
__composer_socket_ok && composer-cli blueprints list
}
__composer_sources() {
__composer_socket_ok && composer-cli sources list
}
__composer_compose_types() {
__composer_socket_ok && composer-cli compose types
}
__composer_composes() {
__composer_socket_ok && composer-cli compose list $@ | while read id rest; do echo $id; done
}
__composer_provider_list() {
__composer_socket_ok && composer-cli providers list
}
__composer_profile_list() {
__composer_socket_ok && composer-cli providers list $1
}
__word_in_list() {
local w word=$1; shift
for w in "$@"; do
[ "$w" == "$word" ] && return 0
done
return 1
}
_composer_cli() {
local cur="${COMP_WORDS[COMP_CWORD]}" prev="${COMP_WORDS[COMP_CWORD-1]}"
local w="" wi=0 cmd="__NONE__" subcmd="__NONE__" cmd_cword=0
# find the command and its subcommand
for (( wi=0; wi < ${#COMP_WORDS[*]}; wi++ )); do
if __word_in_list "${COMP_WORDS[wi]}" "${!__composer_cli_cmds[@]}"; then
cmd="${COMP_WORDS[wi]}"
subcmd="${COMP_WORDS[wi+1]}"
cmd_cword=$((COMP_CWORD-wi))
break
fi
done
COMPREPLY=()
if [ "$cmd_cword" -le 0 ]; then
# No command yet, complete flags or commands
case "$prev" in
-s|--socket|--log)
# If it's a flag that takes a filename, suggest filenames
compopt -o filenames
COMPREPLY=($(compgen -f -- "${cur}"))
;;
-a|--api|--test)
# If it's a flag that takes an arg we can't guess, don't suggest anything
COMPREPLY=()
;;
*)
if [ "${cur:0:1}" == "-" ]; then
# Suggest flags if cur starts with '-'
COMPREPLY=($(compgen -W "${__composer_cli_flags}" -- "${cur}"))
else
# Suggest commands if there isn't one already
COMPREPLY=($(compgen -W "${!__composer_cli_cmds[*]}" -- "${cur}"))
fi
;;
esac
elif [ $cmd_cword == 1 ]; then
# Complete the word after the command
COMPREPLY=($(compgen -W "${__composer_cli_cmds[$cmd]} help" -- "${cur}"))
elif [ $cmd_cword == 2 ]; then
# Complete word(s) after subcommand
case "$cmd:$subcmd" in
compose:list)
COMPREPLY=($(compgen -W "waiting running finish failed" -- "${cur}"))
;;
providers:list|providers:template)
COMPREPLY=($(compgen -W "$(__composer_provider_list)" -- "${cur}"))
;;
*:list|*:help|compose:types)
COMPREPLY=()
;;
sources:info|sources:delete)
COMPREPLY=($(compgen -W "$(__composer_sources)" -- "${cur}"))
;;
sources:add|sources:change|blueprints:workspace|blueprints:push|providers:push)
compopt -o filenames
COMPREPLY=($(compgen -f -- "${cur}"))
;;
blueprints:freeze)
COMPREPLY=($(compgen -W "$(__composer_blueprints) show save" -- "${cur}"))
;;
compose:start|compose:start-ostree|blueprints:*)
COMPREPLY=($(compgen -W "$(__composer_blueprints)" -- "${cur}"))
;;
compose:cancel)
COMPREPLY=($(compgen -W "$(__composer_composes running waiting)" -- "${cur}"))
;;
compose:delete|compose:results|compose:metadata)
COMPREPLY=($(compgen -W "$(__composer_composes finished failed)" -- "${cur}"))
;;
compose:log*)
COMPREPLY=($(compgen -W "$(__composer_composes running finished failed)" -- "${cur}"))
;;
compose:image)
COMPREPLY=($(compgen -W "$(__composer_composes finished)" -- "${cur}"))
;;
compose:*)
COMPREPLY=($(compgen -W "$(__composer_composes)" -- "${cur}"))
;;
upload:start)
COMPREPLY=($(compgen -W "$(__composer_composes)" -- "${cur}"))
;;
providers:show|providers:save|providers:delete|providers:info)
COMPREPLY=($(compgen -W "$(__composer_provider_list)" -- "${cur}"))
;;
esac
else
# Complete words past the subcommand's argument (if appropriate)
case "$cmd:$subcmd" in
compose:delete)
COMPREPLY=($(compgen -W "$(__composer_composes finished failed)" -- "${cur}"))
;;
compose:start|compose:start-ostree)
subpos="$subcmd:$cmd_cword"
if [ "$cmd_cword" == 3 ]; then
COMPREPLY=($(compgen -W "$(__composer_compose_types)" -- "${cur}"))
elif [ "$subpos" == "start:5" ] || [ "$subpos" == "start-ostree:7" ]; then
# If they have typed something looking like a path, use file completion
# otherwise suggest providers.
case "${cur}" in
*/*)
compopt -o filenames
COMPREPLY=($(compgen -f -- "${cur}"))
;;
*)
COMPREPLY=($(compgen -W "$(__composer_provider_list)" -- "${cur}"))
;;
esac
elif [ "$subpos" == "start:6" ] || [ "$subpos" == "start-ostree:8" ]; then
COMPREPLY=($(compgen -W "$(__composer_profile_list ${prev})" -- "${cur}"))
fi
;;
# TODO: blueprints:diff and blueprints:undo want commits
blueprints:freeze|blueprints:save|blueprints:depsolve|blueprints:changes|blueprints:show)
COMPREPLY=($(compgen -W "$(__composer_blueprints)" -- "${cur}"))
;;
sources:info)
COMPREPLY=($(compgen -W "$(__composer_sources)" -- "${cur}"))
;;
upload:start)
if [ "$cmd_cword" == 4 ]; then
# If they have typed something looking like a path, use file completion
# otherwise suggest providers.
case "${cur}" in
*/*)
compopt -o filenames
COMPREPLY=($(compgen -f -- "${cur}"))
;;
*)
COMPREPLY=($(compgen -W "$(__composer_provider_list)" -- "${cur}"))
;;
esac
elif [ "$cmd_cword" == 5 ]; then
COMPREPLY=($(compgen -W "$(__composer_profile_list ${prev})" -- "${cur}"))
fi
;;
providers:show|providers:save|providers:delete)
if [ "$cmd_cword" == 3 ]; then
COMPREPLY=($(compgen -W "$(__composer_profile_list ${prev})" -- "${cur}"))
fi
;;
esac
fi
}
complete -F _composer_cli composer-cli
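
The script above can be exercised by loading it into an interactive shell; an
illustrative session (output abbreviated):

$ source etc/bash_completion.d/composer-cli
$ complete -p composer-cli
complete -F _composer_cli composer-cli
$ composer-cli bl<TAB>        # completes to "blueprints"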

View File

@@ -136,17 +136,6 @@ Provides: lorax-templates = %{version}-%{release}
Lorax templates for creating the boot.iso and live isos are placed in
/usr/share/lorax/templates.d/99-generic
%package -n composer-cli
Summary: A command line tool for use with the lorax-composer API server
# From Distribution
Requires: python3-urllib3
Requires: python3-toml
%description -n composer-cli
A command line tool for use with the lorax-composer API server. Examine blueprints,
build images, etc. from the command line.
%prep
%autosetup -n %{name}-%{version} -p1
@@ -160,8 +149,7 @@ make DESTDIR=$RPM_BUILD_ROOT mandir=%{_mandir} install
%defattr(-,root,root,-)
%license COPYING
%doc AUTHORS
%doc docs/composer-cli.rst docs/lorax.rst docs/livemedia-creator.rst docs/product-images.rst
%doc docs/lorax-composer.rst
%doc docs/lorax.rst docs/livemedia-creator.rst docs/product-images.rst
%doc docs/*ks
%{python3_sitelib}/pylorax
%{python3_sitelib}/*.egg-info
@@ -189,12 +177,6 @@ make DESTDIR=$RPM_BUILD_ROOT mandir=%{_mandir} install
%dir %{_datadir}/lorax/templates.d
%{_datadir}/lorax/templates.d/*
%files -n composer-cli
%{_bindir}/composer-cli
%{python3_sitelib}/composer/*
%{_sysconfdir}/bash_completion.d/composer-cli
%{_mandir}/man1/composer-cli.1*
%changelog
* Mon Feb 15 2021 Brian C. Lane <bcl@redhat.com> 34.9-1
- Use inst.rescue to trigger rescue mode (awilliam@redhat.com)
@@ -351,375 +333,3 @@ make DESTDIR=$RPM_BUILD_ROOT mandir=%{_mandir} install
- lorax: Update how the release package is chosen (bcl@redhat.com)
- ltmpl: Fix package logging format (bcl@redhat.com)
Resolves: rhbz#1815000
* Mon Mar 16 2020 Brian C. Lane <bcl@redhat.com> 32.7-1
- lorax: Write package lists in run_transaction (bcl@redhat.com)
- Add dig and comm to the boot.iso (bcl@redhat.com)
- lorax-composer: Add 'weldr' to indicate it supports the weldr API (bcl@redhat.com)
- lorax: Cleanup the removefrom --allbut files (bcl@redhat.com)
- lorax: Add eject back into the boot.iso (bcl@redhat.com)
* Wed Feb 12 2020 Brian C. Lane <bcl@redhat.com> 32.6-1
- New lorax documentation - 32.6 (bcl@redhat.com)
- Update mock documentation to remove --old-chroot (bcl@redhat.com)
- Use .tasks file to trigger removal of stale cloud resources (atodorov@redhat.com)
- tests: OpenStack - apply tags and delete by tags (atodorov@redhat.com)
- tests: Azure - apply tags and delete by tags (atodorov@redhat.com)
- tests: VMware - delete only VMs named Composer-Test-* (atodorov@redhat.com)
- tests: AWS - apply tags when creating resources and delete by tags (atodorov@redhat.com)
- Reflect fonts packages from comps (akira@tagoh.org)
- lorax: Catch rootfs out of space failures (bcl@redhat.com)
- pylint: whitelist the rpm module (bcl@redhat.com)
- tests: Move the list of packages out of Dockerfile.test into a file (bcl@redhat.com)
- tests: remove ALI_DIR after we're done using the cli (atodorov@redhat.com)
- Test & cleanup script for Alibaba cloud (atodorov@redhat.com)
- tests: run ssh commands in batch mode (jrusz@redhat.com)
- lorax: Log dnf solver debug data in ./debugdata/ (bcl@redhat.com)
- tests: remove --test=2 from compose_sanity (jrusz@redhat.com)
* Thu Jan 16 2020 Brian C. Lane <bcl@redhat.com> 32.5-1
- New lorax documentation - 32.5 (bcl@redhat.com)
- tests: Use mock from unittest (bcl@redhat.com)
- Add --dracut-conf cmdline argument to lorax and livemedia-creator (bcl@redhat.com)
- Add tests for metapackages and package name globs (bcl@redhat.com)
- executils: Drop bufsize=1 from execReadlines (bcl@redhat.com)
- tests: unittest and pytest expect functions to start with test_ (bcl@redhat.com)
- Update to_timeval usage to use format_iso8601 (bcl@redhat.com)
- ltmpl: Update to use collections.abc (bcl@redhat.com)
- test: Use pytest instead of nose (bcl@redhat.com)
- tests: Check for cloud-init presence in azure image (jrusz@redhat.com)
- tests: check for failed compose before trying to cancel (jrusz@redhat.com)
- tests: Enable Elastic Network Adapter support for AWS (atodorov@redhat.com)
- lorax-composer: Enable ami on aarch64 (bcl@redhat.com)
* Fri Jan 10 2020 Brian C. Lane <bcl@redhat.com> 32.4-1
- livemedia-creator: workaround glibc limitation when starting anaconda (dan@danny.cz)
- AWS test: take into account different instance type for non x86 (atodorov@redhat.com)
- Add test for canceling a running compose (jrusz@redhat.com)
- composer-cli: Increase DELETE timeout to 120s (bcl@redhat.com)
- anaconda_cleanup: Remove anaconda.pid if it is left behind (bcl@redhat.com)
- New lorax documentation - 32.4 (bcl@redhat.com)
- docs: Add documentation for new mkksiso --volid feature (bcl@redhat.com)
- mkksiso: Add the option to set the ISO volume label (florian.achleitner@prime-sign.com)
- spec: Add missing BuildRequires: make (florian.achleitner@prime-sign.com)
- tests: Use wildcard versions for packages (bcl@redhat.com)
- composer-cli: Only display the available compose types (bcl@redhat.com)
- fix typo in api docstring (obudai@redhat.com)
- Remove all repo files & install composer-cli from host repos (atodorov@redhat.com)
- Always remove lorax-composer & composer-cli RPMs before installing them (atodorov@redhat.com)
- Always remove existing VM image before building new one (atodorov@redhat.com)
- Add git to Dockerfile.test (bcl@redhat.com)
* Mon Nov 18 2019 Brian C. Lane <bcl@redhat.com> 32.3-1
- lorax-composer: Add cloud-init support to the vhd image (bcl@redhat.com)
- tests: add docker variable to .travis.yml (jrusz@redhat.com)
- tests: Changed Docker to podman in Makefile (jrusz@redhat.com)
- tests: fix blueprints tag test (jrusz@redhat.com)
- test: fix serializing repo_to_source test (jrusz@redhat.com)
- composer-cli: Return int from handle_api_result not bool (bcl@redhat.com)
- mkksiso: copy all the directories over to tmpdir (bcl@redhat.com)
- Add dmidecode on supported architectures (bcl@redhat.com)
- docs: Remove --title from list of lmc variables (bcl@redhat.com)
- Drop old lorax.spec changelog entries (pre-F31) (bcl@redhat.com)
* Tue Nov 05 2019 Brian C. Lane <bcl@redhat.com> 32.2-1
- New lorax documentation - 32.2 (bcl@redhat.com)
- tests: Add 'test_mkksiso' tests (bcl@redhat.com)
- mkksiso: Add documentation (bcl@redhat.com)
- mkksiso: Add a tool to add a kickstart to an existing boot.iso (bcl@redhat.com)
- tests: Add a lorax boot.iso test (bcl@redhat.com)
- test: Add wait_boot method for root logins (bcl@redhat.com)
- tests: Ensure failure if beakerlib results file not found (atodorov@redhat.com)
- tests: Documentation updates (atodorov@redhat.com)
- tests: Use host repositories for make vm (atodorov@redhat.com)
- Remove unused make targets (atodorov@redhat.com)
- DRY when setting up, running & parsing results for beakerlib tests (atodorov@redhat.com)
- tests: Disable mirrors (atodorov@redhat.com)
- tests: Use journalctl -g to check for failed login (bcl@redhat.com)
- tests: Fix check_root_account when used with tar liveimg test (bcl@redhat.com)
- tests: Use the same asserts as before (atodorov@redhat.com)
- tests: switch to using podman instead of docker (atodorov@redhat.com)
- tests: Remove nested vm from tar liveimg kickstart test (bcl@redhat.com)
- tests: Use --http0.9 for curl ssh test (bcl@redhat.com)
- test: Boot the live-iso faster, and login using ssh key (bcl@redhat.com)
- test: Split up the test class to allow booting other images (bcl@redhat.com)
- tests: Split testing the image into a separate script (bcl@redhat.com)
- Add live iso support to s390 (bcl@redhat.com)
- docs: Override macboot/nomacboot documentation (bcl@redhat.com)
- Disable some compose types on other architectures (bcl@redhat.com)
- lorax: Drop unused --title option (bcl@redhat.com)
- tests: Document Azure setup (atodorov@redhat.com)
- tests: unskip Azure scenario (atodorov@redhat.com)
* Wed Oct 16 2019 Brian C. Lane <bcl@redhat.com> 32.1-1
- Bump default platform and releasever to 32 (bcl@redhat.com)
- New lorax documentation - 32.1 (bcl@redhat.com)
- docs: Fix Sphinx errors in docstrings (bcl@redhat.com)
- vm.install: Turn on verbose output (bcl@redhat.com)
- tests: Switch the azure examples used in the lifted tests to use aws (bcl@redhat.com)
- Remove lifted azure support (bcl@redhat.com)
- composer-cli: Add providers info <PROVIDER> command (bcl@redhat.com)
- composer-cli: Fix error handling in providers push (bcl@redhat.com)
- composer-cli: Fix upload log output (bcl@redhat.com)
- Add list to bash completion for composer-cli upload (bcl@redhat.com)
- Update composer-cli documentation (bcl@redhat.com)
- Add composer and lifted to coverage report (bcl@redhat.com)
- composer-cli: Add starting an upload to compose start (bcl@redhat.com)
- composer-cli: Add providers template command (bcl@redhat.com)
- bash_completion: Add support for new composer-cli commands (bcl@redhat.com)
- composer-cli: Add support for providers command (bcl@redhat.com)
- composer-cli: Add support for upload command (bcl@redhat.com)
- Increase ansible verbosity to 2 (bcl@redhat.com)
- lifted: Add support for AWS upload (bcl@redhat.com)
- lifted: Improve logging for upload playbooks (bcl@redhat.com)
- Add upload status examples to compose route docstrings (bcl@redhat.com)
- tests: Add tests for deleting unknown upload and profile (bcl@redhat.com)
- Add docstrings to the new upload functions in pylorax.api.queue (bcl@redhat.com)
- Change /compose/uploads/delete to /upload/delete (bcl@redhat.com)
- tests: Add test for /compose/uploads/delete (bcl@redhat.com)
- tests: Add tests for /compose/uploads/schedule (bcl@redhat.com)
- Add profile support to /uploads/schedule/ (bcl@redhat.com)
- tests: Fix comments about V1 API results including uploads data (bcl@redhat.com)
- lifted: Make sure inputs cannot have path elements (bcl@redhat.com)
- Use consistent naming for upload uuids (bcl@redhat.com)
- tests: Add tests for new upload routes (bcl@redhat.com)
- Fix some docstrings in the v1 API (bcl@redhat.com)
- lifted: Make sure providers list is always sorted (bcl@redhat.com)
- Add /upload/providers/delete route (bcl@redhat.com)
- lifted: Add delete_profile function and tests (bcl@redhat.com)
- Add support for starting a compose upload with the profile (bcl@redhat.com)
- lifted: Add a function to load the settings for a provider's profile (bcl@redhat.com)
- Fix pylint errors in lifted.upload (bcl@redhat.com)
- tests: Add yamllint of the lifted playbooks (bcl@redhat.com)
- tests: Add tests for the new lifted module (bcl@redhat.com)
- All providers should have 'supported_types' (bcl@redhat.com)
- lifted directories should be under share_dir and lib_dir (bcl@redhat.com)
- tests: Add tests for API v1 (bcl@redhat.com)
- Make sure V0 API doesn't return uploads information (bcl@redhat.com)
- Automatically upload composed images to the cloud (egoode@redhat.com)
- Add load and dump to pylorax.api.toml (egoode@redhat.com)
- Support CI testing against a bots project PR (martin@piware.de)
- Makefile: Don't clobber an existing bots checkout (martin@piware.de)
- lorax-composer: Handle RecipeError in commit_recipe_directory (bcl@redhat.com)
- test: Disable pylint subprocess check check (bcl@redhat.com)
* Mon Sep 30 2019 Brian C. Lane <bcl@redhat.com> 32.0-1
- aarch64: Fix live-iso creation on aarch64 (bcl@redhat.com)
- Add test for running composer with --no-system-repos option (jikortus@redhat.com)
- [tests] Use functions for starting and stopping lorax-composer (jikortus@redhat.com)
- Makefile: Update bots target for moved GitHub project (sanne.raymaekers@gmail.com)
- Keep the zramctl utility from util-linux on boot.iso (mkolman@redhat.com)
- Skip kickstart tar test for fedora-30/tar scenario (atodorov@redhat.com)
- Enable fedora-30/tar test scenario (atodorov@redhat.com)
- [tests] Collect compose logs after each build (atodorov@redhat.com)
- [tests] Use a function to wait for compose to finish (jikortus@redhat.com)
- When launching AWS instances wait for the one we just launched (atodorov@redhat.com)
- tests: Add kickstart tar installation test (jikortus@redhat.com)
- tests: add option to disable kernel command line parameters check (jikortus@redhat.com)
- tests: Use a loop to wait for VM and sshd to start (bcl@redhat.com)
- creator.py: include dmsquash-live-ntfs by default (gmt@be-evil.net)
- Skip Azure test b/c misconfigured infra & creds (atodorov@redhat.com)
- tests: Drop tito from the Dockerfile.test (bcl@redhat.com)
- tests: Drop sort from compose types test (bcl@redhat.com)
- Revert "tests: Fix the order of liveimg-tar live-iso" (atodorov@redhat.com)
- New test: assert toml files in git workspace (atodorov@redhat.com)
* Tue Aug 20 2019 Brian C. Lane <bcl@redhat.com> 31.10-1
- tests: Update gpg key to fedora 32 (bcl@redhat.com)
- tests: Fix the order of liveimg-tar live-iso (bcl@redhat.com)
- tests: Use server-2.repo instead of single.repo (bcl@redhat.com)
- lorax-composer: Add support for dnf variables to repo sources (bcl@redhat.com)
- Use smarter multipath detection logic. (dlehman@redhat.com)
- tests: Expand test coverage of the v0 and v1 sources API (bcl@redhat.com)
- tests: Temporarily work around rpm and pylint issues (bcl@redhat.com)
- lorax-composer: Add v1 API for projects/source/ (bcl@redhat.com)
- Add /api/v1/ handler with no routes (bcl@redhat.com)
- Move common functions into pylorax.api.utils (bcl@redhat.com)
- Document the release process steps (bcl@redhat.com)
- lorax-composer: Add liveimg-tar image type (bcl@redhat.com)
- livemedia-creator: Use --compress-arg in mksquashfs (bcl@redhat.com)
- livemedia-creator: Remove unused --squashfs_args option (bcl@redhat.com)
- Only use repos with valid urls for test_server.py (bcl@redhat.com)
- lorax-composer: Clarify groups documentation (bcl@redhat.com)
* Mon Jul 29 2019 Brian C. Lane <bcl@redhat.com> 31.9-1
- New lorax documentation - 31.9 (bcl@redhat.com)
- Remove .build-id from install media (riehecky@fnal.gov)
- lorax-composer: Add squashfs_only False to all image types (bcl@redhat.com)
- tests: Update test_creator.py (bcl@redhat.com)
- docs: Add anaconda-live to fedora-livemedia.ks example (bcl@redhat.com)
- livemedia-creator: Use make_runtime for all runtime creation (bcl@redhat.com)
- livemedia-creator: Add support for a squashfs only runtime image (bcl@redhat.com)
- Update rst formatting. Refs #815 (atodorov@redhat.com)
- don't skip Xorg packages on s390x to allow local GUI installation under KVM (dan@danny.cz)
- Use binary mode to tail the file (bcl@redhat.com)
- Return most relevant log file from /compose/log (egoode@redhat.com)
- Use passwd --status for locked root account check (jikortus@redhat.com)
- tests: Use liveuser account for live-iso boot check (bcl@redhat.com)
- Mention python3-magic in HACKING.md (egoode@redhat.com)
- tests: Add check to make sure the compose actually finished (bcl@redhat.com)
- test: check the number of tests that ran (atodorov@redhat.com)
- lorax: Add debug log of command line options (riehecky@fnal.gov)
- lorax: provide runtime lorax config in debug log (riehecky@fnal.gov)
- Remove whitespace in v0_blueprints_new (jacobdkozol@gmail.com)
- Add test for VALID_BLUEPRINT_NAME check (jacobdkozol@gmail.com)
- Add separate validation for blueprint names (jacobdkozol@gmail.com)
- Leave lscpu in the image for additional debugging (riehecky@fnal.gov)
- tests: set skip_if_unavailable in test repos (lars@karlitski.net)
- test/README.md: Add section explaining GitHub integration (lars@karlitski.net)
* Fri Jun 28 2019 Brian C. Lane <bcl@redhat.com> 31.8-1
- Also search for pxeboot kernel and initrd pairs (hadess@hadess.net)
- Assert that RuntimeErrors have correct messages (egoode@redhat.com)
- More descriptive error for a bad ref in repos.git (egoode@redhat.com)
- Remove unused shell script (atodorov@redhat.com)
- test: Output results for cockpit's log.html (lars@karlitski.net)
- Do not generate journal.xml from beakerlib (atodorov@redhat.com)
- tests: Add RUN_TESTS to Makefile so you can pass in targets (bcl@redhat.com)
- tests: Add tests for recipe checking functions (bcl@redhat.com)
- lorax-composer: Add basic case check to check_recipe_dict (bcl@redhat.com)
- lorax-composer: Add basic recipe checker function (bcl@redhat.com)
- Revert "test: Disable test_live_iso test" (lars@karlitski.net)
- test: Fix test_blueprint_sanity (lars@karlitski.net)
- tests: rpm now returns str, drop decode() call (bcl@redhat.com)
- tests: Drop libgit2 install from koji (bcl@redhat.com)
* Tue Jun 18 2019 Brian C. Lane <bcl@redhat.com> 31.7-1
- New lorax documentation - 31.7 (bcl@redhat.com)
- Update qemu arguments to work correctly with nographic (bcl@redhat.com)
- Switch to new toml library (bcl@redhat.com)
- composer-cli: Update diff support for customizations and repos.git (bcl@redhat.com)
- Add support for customizations and repos.git to /blueprints/diff/ (bcl@redhat.com)
- tests: Update custom-base with customizations (bcl@redhat.com)
- Move the v0 API documentation into the functions (bcl@redhat.com)
- Update the /api/v0/ route handling to use the flask_blueprints Blueprint class (bcl@redhat.com)
- Extend Flask Blueprint class to allow skipping routes (bcl@redhat.com)
- Remove PR template (atodorov@redhat.com)
- Increase retry count/sleep times when waiting for lorax to start (atodorov@redhat.com)
- Revert "remove the check for qemu-kvm" (atodorov@redhat.com)
- Revert "remove the check for /usr/bin/docker in the setup phase" (atodorov@redhat.com)
- [tests] Define unbound variables in test scripts (atodorov@redhat.com)
- [tests] Handle blueprints in setup_tests/teardown_tests correctly (atodorov@redhat.com)
- [tests] grep|cut for IP address in a more robust way (atodorov@redhat.com)
- Remove quotes around file test in make vm (atodorov@redhat.com)
- test: Don't wait on --sit when test succeeds (lars@karlitski.net)
- Monkey-patch beakerlib to fail on first assert (lars@karlitski.net)
- test_cli.sh: Return beakerlib's exit code (lars@karlitski.net)
- Don't send CORS headers (lars@karlitski.net)
- tests: Set BLUEPRINTS_DIR in all cases (lars@karlitski.net)
- tests: Fail on script errors (lars@karlitski.net)
- Add API integration test (lars@karlitski.net)
- composer: Set up a custom HTTP error handler (lars@karlitski.net)
- Split live-iso and qcow2 and update test scenario execution (atodorov@redhat.com)
- Configure $PACKAGE for beakerlib reports (atodorov@redhat.com)
- Use cloud credentials during test if they exist (atodorov@redhat.com)
- Don't execute compose/blueprint sanity tests in Travis CI (atodorov@redhat.com)
- test: Add --list option to test/check* scripts (lars@karlitski.net)
- test: Add --sit argument to check-* scripts (lars@karlitski.net)
- test: Custom main() function (lars@karlitski.net)
- Use ansible instead of awscli (jstodola@redhat.com)
- Fix path to generic.prm (jstodola@redhat.com)
- Update example fedora-livemedia.ks (bcl@redhat.com)
- Update composer live-iso template (bcl@redhat.com)
- test: Disable test_live_iso test (lars@karlitski.net)
- tests: Source lib.sh from the right directory (lars@karlitski.net)
- Revert "Add rpmfluff temporarily" (bcl@redhat.com)
- tests: Update tmux version to 2.9a (bcl@redhat.com)
- test: Install beakerlib on non-RHEL images (martin@piware.de)
- tests: Fail immediately when image build fails (lars@karlitski.net)
- test: Install beakerlib when running on rhel (lars@karlitski.net)
- test: Generalize fs resizing in vm.install (lars@karlitski.net)
- tests: Re-enable kvm (lars@karlitski.net)
- test: Fix vm.install to be idempotent (lars@karlitski.net)
- tests: Don't depend on kvm for tar and qcow2 tests (lars@karlitski.net)
- test_compose_tar: Work around selinux policy change (lars@karlitski.net)
- test_compose_tar: Be less verbose (lars@karlitski.net)
- test_compose_tar: Fix docker test (lars@karlitski.net)
- tests: Extract images to /var/tmp, not /tmp (lars@karlitski.net)
- Use Cockpit's test images and infrastructure (lars@karlitski.net)
- pylint: Remove unused false positive (lars@karlitski.net)
* Thu May 16 2019 Brian C. Lane <bcl@redhat.com> 31.6-1
- Add kernel to ext4-filesystem template (bcl@redhat.com)
- Create a lorax-docs package with the html docs (bcl@redhat.com)
- Add new documentation branches to index.rst (bcl@redhat.com)
* Tue May 07 2019 Brian C. Lane <bcl@redhat.com> 31.5-1
- Add python3-pycdlib to Dockerfile.test (bcl@redhat.com)
- Replace isoinfo with pycdlib (bcl@redhat.com)
- Add test for passing custom option on kernel command line (jikortus@redhat.com)
- Use verify_image function as a helper for generic tests (jikortus@redhat.com)
* Thu May 02 2019 Brian C. Lane <bcl@redhat.com> 31.4-1
- tests: Update openssh-server to v8.* (bcl@redhat.com)
- New lorax documentation - 31.4 (bcl@redhat.com)
- Change customizations.firewall to append items instead of replace (bcl@redhat.com)
- Update customizations.services documentation (bcl@redhat.com)
- lorax-composer: Add services support to blueprints (bcl@redhat.com)
- Add rpmfluff temporarily (bcl@redhat.com)
- lorax-composer: Add firewall support to blueprints (bcl@redhat.com)
- lorax-composer: Add locale support to blueprints (bcl@redhat.com)
- lorax-composer: Fix customizations when creating a recipe (bcl@redhat.com)
- Update docs for new timezone section (bcl@redhat.com)
- lorax-composer: Add timezone support to blueprint (bcl@redhat.com)
- Proposal for adding to the blueprint customizations (bcl@redhat.com)
- Add test for starting compose with deleted blueprint (jikortus@redhat.com)
- Update VMware info for VMware testing (chrobert@redhat.com)
- tests: Cleanup on failure of in_tempdir (bcl@redhat.com)
- Change [[modules]] to [[packages]] in tests (atodorov@redhat.com)
- Add new test to verify compose paths exist (atodorov@redhat.com)
- Add new sanity tests for blueprints (atodorov@redhat.com)
- Fixes for locked root account test (jikortus@redhat.com)
* Fri Apr 05 2019 Brian C. Lane <bcl@redhat.com> 31.3-1
- Add -iso-level 3 when the install.img is > 4GiB (bcl@redhat.com)
- Correct "recipes" use to "blueprints" in composer-cli description (kwalker@redhat.com)
- Fix keeping files on Amazon s3 (jstodola@redhat.com)
- Allow to keep objects in AWS (jstodola@redhat.com)
- Fix the google cloud boot console settings (dshea@redhat.com)
- Add a compose type for alibaba. (dshea@redhat.com)
- Add a new compose type for Hyper-V (dshea@redhat.com)
- Add a compose check for google cloud images. (dshea@redhat.com)
- Add a compose type for Google Compute Engine (dshea@redhat.com)
- Add a new output type, tar-disk. (dshea@redhat.com)
- Support compressing single files. (dshea@redhat.com)
- Add an option to align the image size to a multiplier. (dshea@redhat.com)
* Mon Apr 01 2019 Brian C. Lane <bcl@redhat.com> 31.2-1
- Add documentation references to lorax-composer service files (bcl@redhat.com)
- Add more tests for gitrpm.py (bcl@redhat.com)
- lorax-composer: Fix installing files from [[repos.git]] to / (bcl@redhat.com)
- New lorax documentation - 31.1 (bcl@redhat.com)
- Make it easier to generate docs for the next release (bcl@redhat.com)
* Tue Mar 26 2019 Brian C. Lane <bcl@redhat.com> 31.1-1
- qemu wasn't restoring the terminal if it was terminated early (bcl@redhat.com)
- Switch the --virt-uefi method to use SecureBoot (bcl@redhat.com)
- pylorax.ltmpl: Add a test for missing quotes (bcl@redhat.com)
- Don't remove chmem and lsmem from install.img (bcl@redhat.com)
- lorax-composer: pass customization.kernel append to extra_boot_args (bcl@redhat.com)
- Improve logging for template syntax errors (bcl@redhat.com)
- Add extra boot args to the livemedia-creator iso templates (bcl@redhat.com)
- lorax-composer: Add the ability to append to the kernel command-line (bcl@redhat.com)
- Add checks for disabled root account (jikortus@redhat.com)
- Update datastore for VMware testing (chrobert@redhat.com)
* Fri Mar 15 2019 Brian C. Lane <bcl@redhat.com> 31.0-1
- Add tests using repos.git in blueprints (bcl@redhat.com)
- Move git repo creation into tests/lib.py (bcl@redhat.com)
- rpmgit: catch potential errors while running git (bcl@redhat.com)
- tests: Add test for Recipe.freeze() function (bcl@redhat.com)
- Add repos.git support to lorax-composer builds (bcl@redhat.com)
- Add pylorax.api.gitrpm module and tests (bcl@redhat.com)
- Add support for [[repos.git]] section to blueprints (bcl@redhat.com)
- Update ppc64le isolabel to match x86_64 logic (bcl@redhat.com)
- Add blacklist_exceptions to multipath.conf (bcl@redhat.com)
- tests: Add python3-mock and python3-sphinx_rtd_theme (bcl@redhat.com)
- Use make ci inside test-in-copy target (atodorov@redhat.com)
- Allow overriding $CLI outside test scripts (atodorov@redhat.com)
- tests: Make it easier to update version globs (bcl@redhat.com)
- New test: Build live-iso and boot with KVM (atodorov@redhat.com)
- lorax-composer: Return UnknownBlueprint errors when using deleted blueprints (bcl@redhat.com)
- lorax-composer: Delete workspace copy when deleting blueprint (bcl@redhat.com)
- New test: Build qcow2 compose and test it with QEMU-KVM (atodorov@redhat.com)

View File

@@ -18,8 +18,7 @@ for root, dnames, fnames in os.walk("share"):
# executable
data_files.append(("/usr/sbin", ["src/sbin/lorax", "src/sbin/mkefiboot",
"src/sbin/livemedia-creator", "src/sbin/mkksiso"]))
data_files.append(("/usr/bin", ["src/bin/image-minimizer",
"src/bin/composer-cli"]))
data_files.append(("/usr/bin", ["src/bin/image-minimizer"]))
# get the version
sys.path.insert(0, "src")
@@ -42,7 +41,7 @@ setup(name="lorax",
url="http://www.github.com/weldr/lorax/",
download_url="http://www.github.com/weldr/lorax/releases/",
license="GPLv2+",
packages=["pylorax", "composer", "composer.cli"],
packages=["pylorax"],
package_dir={"" : "src"},
data_files=data_files
)

View File

@@ -1,94 +0,0 @@
#!/usr/bin/python3
#
# composer-cli
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import os
import sys
# Disable pylint warnings for these, because it cannot deal with this file and
# the module both being called "composer"
from composer import vernum # pylint: disable=import-self
from composer.cli import main # pylint: disable=no-name-in-module
from composer.cli.cmdline import composer_cli_parser # pylint: disable=no-name-in-module
VERSION = "{0}-{1}".format(os.path.basename(sys.argv[0]), vernum)
def setup_logging(logfile=None):
""" Setup logging to console and to an optional logfile
:param logfile: Optional path to file to store logs in
:type logfile: None or str
"""
log.setLevel(logging.DEBUG)
sh = logging.StreamHandler()
sh.setLevel(logging.INFO)
fmt = logging.Formatter("%(asctime)s: %(message)s")
sh.setFormatter(fmt)
log.addHandler(sh)
if logfile != None:
fh = logging.FileHandler(filename=logfile)
fh.setLevel(logging.DEBUG)
fmt = logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s")
fh.setFormatter(fmt)
log.addHandler(fh)
if __name__ == '__main__':
# parse the arguments
arg_parser = composer_cli_parser()
opts = arg_parser.parse_args()
if opts.showver:
print(VERSION)
sys.exit(0)
if opts.logfile != None:
logpath = os.path.abspath(os.path.dirname(opts.logfile))
if not os.path.isdir(logpath):
os.makedirs(logpath)
setup_logging(opts.logfile)
log.debug("opts=%s", opts)
if len(opts.args) == 0:
log.error("Missing command")
sys.exit(1)
elif opts.args[0] == "help":
arg_parser.print_help()
sys.exit(0)
elif len(opts.args) == 1:
log.error("Missing %s sub-command", opts.args[0])
sys.exit(1)
errors = []
# Check to see if the socket exists and can be accessed
if not os.access(opts.socket, os.R_OK|os.W_OK):
errors.append("Cannot access '%s'. Is a WELDR API server (lorax-composer or "
"osbuild-composer) running, and is this user allowed to access it?" % opts.socket)
# No point in continuing if there are errors
if errors:
for e in errors:
log.error(e)
sys.exit(1)
sys.exit(main(opts))

View File

@@ -1,27 +0,0 @@
#!/usr/bin/python
#
# composer-cli
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# get composer version
try:
import composer.version
except ImportError:
vernum = "devel"
else:
vernum = composer.version.num

View File

@@ -1,60 +0,0 @@
#
# composer-cli
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
from composer.cli.blueprints import blueprints_cmd
from composer.cli.modules import modules_cmd
from composer.cli.projects import projects_cmd
from composer.cli.compose import compose_cmd
from composer.cli.sources import sources_cmd
from composer.cli.status import status_cmd
from composer.cli.upload import upload_cmd
from composer.cli.providers import providers_cmd
command_map = {
"blueprints": blueprints_cmd,
"modules": modules_cmd,
"projects": projects_cmd,
"compose": compose_cmd,
"sources": sources_cmd,
"status": status_cmd,
"upload": upload_cmd,
"providers": providers_cmd
}
def main(opts):
""" Main program execution
:param opts: Cmdline arguments
:type opts: argparse.Namespace
"""
# Making sure opts.args is not empty (thus, has a command and subcommand)
# is already handled in src/bin/composer-cli.
if opts.args[0] not in command_map:
log.error("Unknown command %s", opts.args[0])
return 1
else:
try:
return command_map[opts.args[0]](opts)
except Exception as e:
log.error(str(e))
return 1

View File

@@ -1,582 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import os
from composer import http_client as client
from composer.cli.help import blueprints_help
from composer.cli.utilities import argify, frozen_toml_filename, toml_filename, handle_api_result
from composer.cli.utilities import packageNEVRA
def blueprints_cmd(opts):
"""Process blueprints commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
This dispatches the blueprints commands to a function
"""
cmd_map = {
"list": blueprints_list,
"show": blueprints_show,
"changes": blueprints_changes,
"diff": blueprints_diff,
"save": blueprints_save,
"delete": blueprints_delete,
"depsolve": blueprints_depsolve,
"push": blueprints_push,
"freeze": blueprints_freeze,
"tag": blueprints_tag,
"undo": blueprints_undo,
"workspace": blueprints_workspace
}
if opts.args[1] == "help" or opts.args[1] == "--help":
print(blueprints_help)
return 0
elif opts.args[1] not in cmd_map:
log.error("Unknown blueprints command: %s", opts.args[1])
return 1
return cmd_map[opts.args[1]](opts.socket, opts.api_version, opts.args[2:], opts.json)
def blueprints_list(socket_path, api_version, args, show_json=False):
"""Output the list of available blueprints
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints list
"""
api_route = client.api_url(api_version, "/blueprints/list")
result = client.get_url_json_unlimited(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
# "list" should output a plain list of identifiers, one per line.
print("\n".join(result["blueprints"]))
return rc
def blueprints_show(socket_path, api_version, args, show_json=False):
"""Show the blueprints, in TOML format
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints show <blueprint,...> Display the blueprint in TOML format.
Multiple blueprints will be separated by \n\n
"""
for blueprint in argify(args):
api_route = client.api_url(api_version, "/blueprints/info/%s?format=toml" % blueprint)
print(client.get_url_raw(socket_path, api_route) + "\n\n")
return 0
def blueprints_changes(socket_path, api_version, args, show_json=False):
"""Display the changes for each of the blueprints
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints changes <blueprint,...> Display the changes for each blueprint.
"""
def changes_total_fn(data):
"""Return the maximum number of possible changes"""
# Each blueprint can have a different total, return the largest one
return max([c["total"] for c in data["blueprints"]])
api_route = client.api_url(api_version, "/blueprints/changes/%s" % (",".join(argify(args))))
result = client.get_url_json_unlimited(socket_path, api_route, total_fn=changes_total_fn)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
for blueprint in result["blueprints"]:
print(blueprint["name"])
for change in blueprint["changes"]:
prettyCommitDetails(change)
return rc
def prettyCommitDetails(change, indent=4):
"""Print the blueprint's change in a nice way
:param change: The individual blueprint change dict
:type change: dict
:param indent: Number of spaces to indent
:type indent: int
"""
def revision():
if change["revision"]:
return " revision %d" % change["revision"]
else:
return ""
print(" " * indent + change["timestamp"] + " " + change["commit"] + revision())
print(" " * indent + change["message"] + "\n")
def blueprints_diff(socket_path, api_version, args, show_json=False):
"""Display the differences between 2 versions of a blueprint
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints diff <blueprint-name> Display the differences between 2 versions of a blueprint.
<from-commit> Commit hash or NEWEST
<to-commit> Commit hash, NEWEST, or WORKSPACE
"""
if len(args) == 0:
log.error("blueprints diff is missing the blueprint name, from commit, and to commit")
return 1
elif len(args) == 1:
log.error("blueprints diff is missing the from commit, and the to commit")
return 1
elif len(args) == 2:
log.error("blueprints diff is missing the to commit")
return 1
api_route = client.api_url(api_version, "/blueprints/diff/%s/%s/%s" % (args[0], args[1], args[2]))
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
for diff in result["diff"]:
print(pretty_diff_entry(diff))
return rc
def pretty_dict(d):
"""Return the dict as a human readable single line
:param d: key/values
:type d: dict
:returns: String of the dict's keys and values
:rtype: str
key="str", key="str1,str2", ...
"""
result = []
for k in d:
if type(d[k]) == type(""):
result.append('%s="%s"' % (k, d[k]))
elif type(d[k]) == type([]) and type(d[k][0]) == type(""):
result.append('%s="%s"' % (k, ", ".join(d[k])))
elif type(d[k]) == type([]) and type(d[k][0]) == type({}):
result.append('%s="%s"' % (k, pretty_dict(d[k])))
return " ".join(result)
def dict_names(lst):
"""Return comma-separated list of the dict's name/user fields
:param lst: list of dicts with "name" or "user" fields
:type lst: list of dict
:returns: comma-separated string of the name/user values
:rtype: str
root, norm
"""
if "user" in lst[0]:
field_name = "user"
elif "name" in lst[0]:
field_name = "name"
else:
# Use first fields in sorted keys
field_name = sorted(lst[0].keys())[0]
return ", ".join(d[field_name] for d in lst)
def pretty_diff_entry(diff):
"""Generate nice diff entry string.
:param diff: Difference entry dict
:type diff: dict
:returns: Nice string
"""
if diff["old"] and diff["new"]:
change = "Changed"
elif diff["new"] and not diff["old"]:
change = "Added"
elif diff["old"] and not diff["new"]:
change = "Removed"
else:
change = "Unknown"
if diff["old"]:
name = list(diff["old"].keys())[0]
elif diff["new"]:
name = list(diff["new"].keys())[0]
else:
name = "Unknown"
def details(diff):
if change == "Changed":
if type(diff["old"][name]) == type(""):
if name == "Description" or " " in diff["old"][name]:
return '"%s" -> "%s"' % (diff["old"][name], diff["new"][name])
else:
return "%s -> %s" % (diff["old"][name], diff["new"][name])
elif name in ["Module", "Package"]:
return "%s %s -> %s" % (diff["old"][name]["name"], diff["old"][name]["version"],
diff["new"][name]["version"])
elif type(diff["old"][name]) == type([]):
if type(diff["old"][name][0]) == type(""):
return "%s -> %s" % (" ".join(diff["old"][name]), " ".join(diff["new"][name]))
elif type(diff["old"][name][0]) == type({}):
# Lists of dicts are too long to display in detail, just show their names
return "%s -> %s" % (dict_names(diff["old"][name]), dict_names(diff["new"][name]))
elif type(diff["old"][name]) == type({}):
return "%s -> %s" % (pretty_dict(diff["old"][name]), pretty_dict(diff["new"][name]))
else:
return "Unknown"
elif change == "Added":
if name in ["Module", "Package"]:
return "%s %s" % (diff["new"][name]["name"], diff["new"][name]["version"])
elif name in ["Group"]:
return diff["new"][name]["name"]
elif type(diff["new"][name]) == type(""):
return diff["new"][name]
elif type(diff["new"][name]) == type([]):
if type(diff["new"][name][0]) == type(""):
return " ".join(diff["new"][name])
elif type(diff["new"][name][0]) == type({}):
# Lists of dicts are too long to display in detail, just show their names
return dict_names(diff["new"][name])
elif type(diff["new"][name]) == type({}):
return pretty_dict(diff["new"][name])
else:
return "unknown/todo: %s" % type(diff["new"][name])
elif change == "Removed":
if name in ["Module", "Package"]:
return "%s %s" % (diff["old"][name]["name"], diff["old"][name]["version"])
elif name in ["Group"]:
return diff["old"][name]["name"]
elif type(diff["old"][name]) == type(""):
return diff["old"][name]
elif type(diff["old"][name]) == type([]):
if type(diff["old"][name][0]) == type(""):
return " ".join(diff["old"][name])
elif type(diff["old"][name][0]) == type({}):
# Lists of dicts are too long to display in detail, just show their names
return dict_names(diff["old"][name])
elif type(diff["old"][name]) == type({}):
return pretty_dict(diff["old"][name])
else:
return "unknown/todo: %s" % type(diff["new"][name])
return change + " " + name + " " + details(diff)
def blueprints_save(socket_path, api_version, args, show_json=False):
"""Save the blueprint to a TOML file
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints save <blueprint,...> Save the blueprint to a file, <blueprint-name>.toml
"""
for blueprint in argify(args):
api_route = client.api_url(api_version, "/blueprints/info/%s?format=toml" % blueprint)
blueprint_toml = client.get_url_raw(socket_path, api_route)
with open(toml_filename(blueprint), "w") as f:
f.write(blueprint_toml)
return 0
def blueprints_delete(socket_path, api_version, args, show_json=False):
"""Delete a blueprint from the server
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
delete <blueprint> Delete a blueprint from the server
"""
api_route = client.api_url(api_version, "/blueprints/delete/%s" % args[0])
result = client.delete_url_json(socket_path, api_route)
return handle_api_result(result, show_json)[0]
def blueprints_depsolve(socket_path, api_version, args, show_json=False):
"""Display the packages needed to install the blueprint
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints depsolve <blueprint,...> Display the packages needed to install the blueprint.
"""
api_route = client.api_url(api_version, "/blueprints/depsolve/%s" % (",".join(argify(args))))
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
for blueprint in result["blueprints"]:
if blueprint["blueprint"].get("version", ""):
print("blueprint: %s v%s" % (blueprint["blueprint"]["name"], blueprint["blueprint"]["version"]))
else:
print("blueprint: %s" % (blueprint["blueprint"]["name"]))
for dep in blueprint["dependencies"]:
print(" " + packageNEVRA(dep))
return rc
def blueprints_push(socket_path, api_version, args, show_json=False):
"""Push a blueprint TOML file to the server, updating the blueprint
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
push <blueprint> Push a blueprint TOML file to the server.
"""
api_route = client.api_url(api_version, "/blueprints/new")
rval = 0
for blueprint in argify(args):
if not os.path.exists(blueprint):
log.error("Missing blueprint file: %s", blueprint)
continue
with open(blueprint, "r") as f:
blueprint_toml = f.read()
result = client.post_url_toml(socket_path, api_route, blueprint_toml)
if handle_api_result(result, show_json)[0]:
rval = 1
return rval
def blueprints_freeze(socket_path, api_version, args, show_json=False):
"""Handle the blueprints freeze commands
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints freeze <blueprint,...> Display the frozen blueprint's modules and packages.
blueprints freeze show <blueprint,...> Display the frozen blueprint in TOML format.
blueprints freeze save <blueprint,...> Save the frozen blueprint to a file, <blueprint-name>.frozen.toml.
"""
if args[0] == "show":
return blueprints_freeze_show(socket_path, api_version, args[1:], show_json)
elif args[0] == "save":
return blueprints_freeze_save(socket_path, api_version, args[1:], show_json)
if len(args) == 0:
log.error("freeze is missing the blueprint name")
return 1
api_route = client.api_url(api_version, "/blueprints/freeze/%s" % (",".join(argify(args))))
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
for entry in result["blueprints"]:
blueprint = entry["blueprint"]
if blueprint.get("version", ""):
print("blueprint: %s v%s" % (blueprint["name"], blueprint["version"]))
else:
print("blueprint: %s" % (blueprint["name"]))
for m in blueprint["modules"]:
print(" %s-%s" % (m["name"], m["version"]))
for p in blueprint["packages"]:
print(" %s-%s" % (p["name"], p["version"]))
return rc
def blueprints_freeze_show(socket_path, api_version, args, show_json=False):
"""Show the frozen blueprint in TOML format
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints freeze show <blueprint,...> Display the frozen blueprint in TOML format.
"""
if len(args) == 0:
log.error("freeze show is missing the blueprint name")
return 1
for blueprint in argify(args):
api_route = client.api_url(api_version, "/blueprints/freeze/%s?format=toml" % blueprint)
print(client.get_url_raw(socket_path, api_route))
return 0
def blueprints_freeze_save(socket_path, api_version, args, show_json=False):
"""Save the frozen blueprint to a TOML file
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints freeze save <blueprint,...> Save the frozen blueprint to a file, <blueprint-name>.frozen.toml.
"""
if len(args) == 0:
log.error("freeze save is missing the blueprint name")
return 1
for blueprint in argify(args):
api_route = client.api_url(api_version, "/blueprints/freeze/%s?format=toml" % blueprint)
blueprint_toml = client.get_url_raw(socket_path, api_route)
with open(frozen_toml_filename(blueprint), "w") as f:
f.write(blueprint_toml)
return 0
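# Illustrative note (not part of the original module): frozen_toml_filename() comes from
# composer.cli.utilities and is assumed to map a blueprint name to "<blueprint-name>.frozen.toml",
# e.g. "example-server" -> "example-server.frozen.toml".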
def blueprints_tag(socket_path, api_version, args, show_json=False):
"""Tag the most recent blueprint commit as a release
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints tag <blueprint> Tag the most recent blueprint commit as a release.
"""
api_route = client.api_url(api_version, "/blueprints/tag/%s" % args[0])
result = client.post_url(socket_path, api_route, "")
return handle_api_result(result, show_json)[0]
def blueprints_undo(socket_path, api_version, args, show_json=False):
"""Undo changes to a blueprint
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints undo <blueprint> <commit> Undo changes to a blueprint by reverting to the selected commit.
"""
if len(args) == 0:
log.error("undo is missing the blueprint name and commit hash")
return 1
elif len(args) == 1:
log.error("undo is missing commit hash")
return 1
api_route = client.api_url(api_version, "/blueprints/undo/%s/%s" % (args[0], args[1]))
result = client.post_url(socket_path, api_route, "")
return handle_api_result(result, show_json)[0]
def blueprints_workspace(socket_path, api_version, args, show_json=False):
"""Push the blueprint TOML to the temporary workspace storage
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
blueprints workspace <blueprint> Push the blueprint TOML to the temporary workspace storage.
"""
api_route = client.api_url(api_version, "/blueprints/workspace")
rval = 0
for blueprint in argify(args):
if not os.path.exists(blueprint):
log.error("Missing blueprint file: %s", blueprint)
continue
with open(blueprint, "r") as f:
blueprint_toml = f.read()
result = client.post_url_toml(socket_path, api_route, blueprint_toml)
if handle_api_result(result, show_json)[0]:
rval = 1
return rval
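# Illustrative sketch (not part of the original module): the helpers imported from
# composer.cli.utilities are assumed to behave roughly as follows. argify() flattens
# comma-separated command line arguments, and handle_api_result() turns an API response
# into a (return-code, exit-now) tuple, printing errors or the raw JSON when requested.
def argify_sketch(args):
    """Roughly how argify is assumed to work: ["one,two", "three"] -> ["one", "two", "three"]."""
    return [part for arg in args for part in arg.split(",") if part]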

View File

@ -1,50 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import argparse
from composer import vernum
from composer.cli.help import epilog
VERSION = "{0}-{1}".format(os.path.basename(sys.argv[0]), vernum)
def composer_cli_parser():
""" Return the ArgumentParser for composer-cli"""
parser = argparse.ArgumentParser(description="Lorax Composer commandline tool",
epilog=epilog,
formatter_class=argparse.RawDescriptionHelpFormatter,
fromfile_prefix_chars="@")
parser.add_argument("-j", "--json", action="store_true", default=False,
help="Output the raw JSON response instead of the normal output.")
parser.add_argument("-s", "--socket", default="/run/weldr/api.socket", metavar="SOCKET",
help="Path to the socket file to listen on")
parser.add_argument("--log", dest="logfile", default=None, metavar="LOG",
help="Path to logfile (./composer-cli.log)")
parser.add_argument("-a", "--api", dest="api_version", default="1", metavar="APIVER",
help="API Version to use")
parser.add_argument("--test", dest="testmode", default=0, type=int, metavar="TESTMODE",
help="Pass test mode to compose. 1=Mock compose with fail. 2=Mock compose with finished.")
parser.add_argument("-V", action="store_true", dest="showver",
help="show program's version number and exit")
# Commands are implemented by parsing the remaining arguments outside of argparse
parser.add_argument('args', nargs=argparse.REMAINDER)
return parser

View File

@ -1,691 +0,0 @@
#
# Copyright (C) 2018-2020 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
from datetime import datetime
import sys
import json
import toml
from composer import http_client as client
from composer.cli.help import compose_help
from composer.cli.utilities import argify, handle_api_result, packageNEVRA, get_arg
def compose_cmd(opts):
"""Process compose commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
This dispatches the compose commands to a function
    The sub-command functions are passed an api dict with details about the server. eg.
{"version": 1, "backend": "lorax-composer"}
"""
result = client.get_url_json(opts.socket, "/api/status")
# Get the api version and fall back to 0 if it fails.
api_version = result.get("api", "0")
backend = result.get("backend", "unknown")
api = {"version": api_version, "backend": backend}
cmd_map = {
"list": compose_list,
"status": compose_status,
"types": compose_types,
"start": compose_start,
"log": compose_log,
"cancel": compose_cancel,
"delete": compose_delete,
"info": compose_info,
"metadata": compose_metadata,
"results": compose_results,
"logs": compose_logs,
"image": compose_image,
"start-ostree": compose_ostree,
}
if opts.args[1] == "help" or opts.args[1] == "--help":
print(compose_help)
return 0
elif opts.args[1] not in cmd_map:
log.error("Unknown compose command: %s", opts.args[1])
return 1
return cmd_map[opts.args[1]](opts.socket, opts.api_version, opts.args[2:], opts.json, opts.testmode, api=api)
def get_size(args):
"""Return optional --size argument, and remaining args
:param args: list of arguments
:type args: list of strings
:returns: (args, size)
:rtype: tuple
    - checks the size argument for a valid integer
    - a missing --size returns 0 for size
    - multiplies the value by 1024**2 so users can specify the size in MiB
"""
args, value = get_arg(args, "--size", int)
value = value * 1024**2 if value is not None else 0
return (args, value)
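# Worked example (illustrative, not part of the original file): passing "--size 4096"
# removes the option and its value from args and converts MiB to bytes:
#   args, size = get_size(["--size", "4096", "example-blueprint", "qcow2"])
#   # args == ["example-blueprint", "qcow2"], size == 4096 * 1024**2 == 4294967296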
def get_parent(args):
"""Return optional --parent argument, and remaining args
:param args: list of arguments
:type args: list of strings
:returns: (args, parent)
:rtype: tuple
"""
args, value = get_arg(args, "--parent")
value = value if value is not None else ""
return (args, value)
def get_ref(args):
"""Return optional --ref argument, and remaining args
:param args: list of arguments
:type args: list of strings
    :returns: (args, ref)
:rtype: tuple
"""
args, value = get_arg(args, "--ref")
value = value if value is not None else ""
return (args, value)
def compose_list(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Return a simple list of compose identifiers"""
states = ("running", "waiting", "finished", "failed")
which = set()
    if any(a not in states for a in args):
        log.error("Valid compose states are: %s", ", ".join(states))
        return 1
elif not args:
which.update(states)
else:
which.update(args)
results = []
if "running" in which or "waiting" in which:
api_route = client.api_url(api_version, "/compose/queue")
r = client.get_url_json(socket_path, api_route)
if "running" in which:
results += r["run"]
if "waiting" in which:
results += r["new"]
if "finished" in which:
api_route = client.api_url(api_version, "/compose/finished")
r = client.get_url_json(socket_path, api_route)
results += r["finished"]
if "failed" in which:
api_route = client.api_url(api_version, "/compose/failed")
r = client.get_url_json(socket_path, api_route)
results += r["failed"]
if results:
if show_json:
print(json.dumps(results, indent=4))
else:
list_fmt = "{id} {queue_status} {blueprint} {version} {compose_type}"
print("\n".join(list_fmt.format(**c) for c in results))
return 0
def compose_status(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Return the status of all known composes
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
    This doesn't map directly to an API command. It combines the results from queue, finished,
    and failed, so the --json output is this combined list rather than the raw API response.
"""
def get_status(compose):
return {"id": compose["id"],
"blueprint": compose["blueprint"],
"version": compose["version"],
"compose_type": compose["compose_type"],
"image_size": compose["image_size"],
"status": compose["queue_status"],
"created": compose.get("job_created"),
"started": compose.get("job_started"),
"finished": compose.get("job_finished")}
# Sort the status in a specific order
def sort_status(a):
order = ["RUNNING", "WAITING", "FINISHED", "FAILED"]
return (order.index(a["status"]), a["blueprint"], a["version"], a["compose_type"])
status = []
# Get the composes currently in the queue
api_route = client.api_url(api_version, "/compose/queue")
result = client.get_url_json(socket_path, api_route)
status.extend(list(map(get_status, result["run"] + result["new"])))
# Get the list of finished composes
api_route = client.api_url(api_version, "/compose/finished")
result = client.get_url_json(socket_path, api_route)
status.extend(list(map(get_status, result["finished"])))
# Get the list of failed composes
api_route = client.api_url(api_version, "/compose/failed")
result = client.get_url_json(socket_path, api_route)
status.extend(list(map(get_status, result["failed"])))
# Sort them by status (running, waiting, finished, failed) and then by name and version.
status.sort(key=sort_status)
if show_json:
print(json.dumps(status, indent=4))
return 0
# Print them as UUID blueprint STATUS
for c in status:
if c["image_size"] > 0:
image_size = str(c["image_size"])
else:
image_size = ""
dt = datetime.fromtimestamp(c.get("finished") or c.get("started") or c.get("created"))
print("%s %-8s %s %-15s %s %-16s %s" % (c["id"], c["status"], dt.strftime("%c"), c["blueprint"],
c["version"], c["compose_type"], image_size))
def compose_types(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Return information about the supported compose types
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
    Prints the list of enabled output types, one per line. The raw JSON output is the
    unmodified API response.
"""
api_route = client.api_url(api_version, "/compose/types")
result = client.get_url_json(socket_path, api_route)
if show_json:
print(json.dumps(result, indent=4))
return 0
    # output a plain list of identifiers, one per line
    print("\n".join(t["name"] for t in result["types"] if t["enabled"]))
    return 0
def compose_start(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Start a new compose using the selected blueprint and type
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: Set to 1 to simulate a failed compose, set to 2 to simulate a finished one.
:type testmode: int
:param api: Details about the API server, "version" and "backend"
:type api: dict
compose start [--size XXX] <blueprint-name> <compose-type> [<image-name> <provider> <profile> | <image-name> <profile.toml>]
"""
    if api is None:
log.error("Missing api version/backend")
return 1
# Get the optional size before checking other parameters
try:
args, size = get_size(args)
except (RuntimeError, ValueError) as e:
log.error(str(e))
return 1
if len(args) == 0:
log.error("start is missing the blueprint name and output type")
return 1
if len(args) == 1:
log.error("start is missing the output type")
return 1
if len(args) == 3:
log.error("start is missing the provider and profile details")
return 1
config = {
"blueprint_name": args[0],
"compose_type": args[1],
"branch": "master"
}
if size > 0:
if api["backend"] == "lorax-composer":
log.warning("lorax-composer does not support --size, it will be ignored.")
else:
config["size"] = size
if len(args) == 4:
config["upload"] = {"image_name": args[2]}
# profile TOML file (maybe)
try:
config["upload"].update(toml.load(args[3]))
except toml.TomlDecodeError as e:
log.error(str(e))
return 1
elif len(args) == 5:
config["upload"] = {
"image_name": args[2],
"provider": args[3],
"profile": args[4]
}
if testmode:
test_url = "?test=%d" % testmode
else:
test_url = ""
api_route = client.api_url(api_version, "/compose" + test_url)
result = client.post_url_json(socket_path, api_route, json.dumps(config))
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
print("Compose %s added to the queue" % result["build_id"])
if "upload_id" in result and result["upload_id"]:
print ("Upload %s added to the upload queue" % result["upload_id"])
return rc
def compose_ostree(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Start a new ostree compose using the selected blueprint and type
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: Set to 1 to simulate a failed compose, set to 2 to simulate a finished one.
:type testmode: int
:param api: Details about the API server, "version" and "backend"
:type api: dict
compose start-ostree [--size XXXX] [--parent PARENT] [--ref REF] <BLUEPRINT> <TYPE> [<IMAGE-NAME> <PROFILE.TOML>]
"""
    if api is None:
log.error("Missing api version/backend")
return 1
if api["backend"] == "lorax-composer":
log.warning("lorax-composer doesn not support start-ostree.")
return 1
# Get the optional size before checking other parameters
try:
args, size = get_size(args)
args, parent = get_parent(args)
args, ref = get_ref(args)
except (RuntimeError, ValueError) as e:
log.error(str(e))
return 1
if len(args) == 0:
log.error("start-ostree is missing the blueprint name, output type, and ostree details")
return 1
if len(args) == 1:
log.error("start-ostree is missing the output type")
return 1
if len(args) == 3:
log.error("start-ostree is missing the provider TOML file")
return 1
config = {
"blueprint_name": args[0],
"compose_type": args[1],
"branch": "master",
"ostree": {"ref": ref, "parent": parent},
}
if size > 0:
config["size"] = size
if len(args) == 4:
config["upload"] = {"image_name": args[2]}
# profile TOML file (maybe)
try:
config["upload"].update(toml.load(args[3]))
except toml.TomlDecodeError as e:
log.error(str(e))
return 1
if testmode:
test_url = "?test=%d" % testmode
else:
test_url = ""
api_route = client.api_url(api_version, "/compose" + test_url)
result = client.post_url_json(socket_path, api_route, json.dumps(config))
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
print("Compose %s added to the queue" % result["build_id"])
if "upload_id" in result and result["upload_id"]:
print ("Upload %s added to the upload queue" % result["upload_id"])
return rc
def compose_log(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Show the last part of the compose log
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose log <uuid> [<size>kB]
This will display the last 1kB of the compose's log file. Can be used to follow progress
during the build.
"""
if len(args) == 0:
log.error("log is missing the compose build id")
return 1
if len(args) == 2:
try:
log_size = int(args[1])
except ValueError:
log.error("Log size must be an integer.")
return 1
else:
log_size = 1024
api_route = client.api_url(api_version, "/compose/log/%s?size=%d" % (args[0], log_size))
try:
result = client.get_url_raw(socket_path, api_route)
except RuntimeError as e:
print(str(e))
return 1
print(result)
return 0
def compose_cancel(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Cancel a running compose
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose cancel <uuid>
This will cancel a running compose. It does nothing if the compose has finished.
"""
if len(args) == 0:
log.error("cancel is missing the compose build id")
return 1
api_route = client.api_url(api_version, "/compose/cancel/%s" % args[0])
result = client.delete_url_json(socket_path, api_route)
return handle_api_result(result, show_json)[0]
def compose_delete(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Delete a finished compose's results
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose delete <uuid,...>
Delete the listed compose results. It will only delete results for composes that have finished
or failed, not a running compose.
"""
if len(args) == 0:
log.error("delete is missing the compose build id")
return 1
api_route = client.api_url(api_version, "/compose/delete/%s" % (",".join(argify(args))))
result = client.delete_url_json(socket_path, api_route)
return handle_api_result(result, show_json)[0]
def compose_info(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Return detailed information about the compose
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose info <uuid>
This returns information about the compose, including the blueprint and the dependencies.
"""
if len(args) == 0:
log.error("info is missing the compose build id")
return 1
api_route = client.api_url(api_version, "/compose/info/%s" % args[0])
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
if result["image_size"] > 0:
image_size = str(result["image_size"])
else:
image_size = ""
print("%s %-8s %-15s %s %-16s %s" % (result["id"],
result["queue_status"],
result["blueprint"]["name"],
result["blueprint"]["version"],
result["compose_type"],
image_size))
print("Packages:")
for p in result["blueprint"]["packages"]:
print(" %s-%s" % (p["name"], p["version"]))
print("Modules:")
for m in result["blueprint"]["modules"]:
print(" %s-%s" % (m["name"], m["version"]))
print("Dependencies:")
for d in result["deps"]["packages"]:
print(" " + packageNEVRA(d))
return rc
def compose_metadata(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Download a tar file of the compose's metadata
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose metadata <uuid>
Saves the metadata as uuid-metadata.tar
"""
if len(args) == 0:
log.error("metadata is missing the compose build id")
return 1
api_route = client.api_url(api_version, "/compose/metadata/%s" % args[0])
try:
rc = client.download_file(socket_path, api_route)
except RuntimeError as e:
print(str(e))
rc = 1
return rc
def compose_results(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Download a tar file of the compose's results
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose results <uuid>
The results includes the metadata, output image, and logs.
It is saved as uuid.tar
"""
if len(args) == 0:
log.error("results is missing the compose build id")
return 1
api_route = client.api_url(api_version, "/compose/results/%s" % args[0])
try:
rc = client.download_file(socket_path, api_route, sys.stdout.isatty())
except RuntimeError as e:
print(str(e))
rc = 1
return rc
def compose_logs(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Download a tar of the compose's logs
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose logs <uuid>
Saves the logs as uuid-logs.tar
"""
if len(args) == 0:
log.error("logs is missing the compose build id")
return 1
api_route = client.api_url(api_version, "/compose/logs/%s" % args[0])
try:
rc = client.download_file(socket_path, api_route, sys.stdout.isatty())
except RuntimeError as e:
print(str(e))
rc = 1
return rc
def compose_image(socket_path, api_version, args, show_json=False, testmode=0, api=None):
"""Download the compose's output image
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
compose image <uuid>
This downloads only the result image, saving it as the image name, which depends on the type
of compose that was selected.
"""
if len(args) == 0:
log.error("logs is missing the compose build id")
return 1
api_route = client.api_url(api_version, "/compose/image/%s" % args[0])
try:
rc = client.download_file(socket_path, api_route, sys.stdout.isatty())
except RuntimeError as e:
print(str(e))
rc = 1
return rc

View File

@ -1,179 +0,0 @@
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Documentation for the commands
compose_help = """
compose start [--size XXXX] <BLUEPRINT> <TYPE> [<IMAGE-NAME> <PROVIDER> <PROFILE> | <IMAGE-NAME> <PROFILE.TOML>]
Start a compose using the selected blueprint and output type. Optionally start an upload.
--size is supported by osbuild-composer, and is in MiB.
compose start-ostree [--size XXXX] [--parent PARENT] [--ref REF] <BLUEPRINT> <TYPE> [<IMAGE-NAME> <PROFILE.TOML>]
Start an ostree compose using the selected blueprint and output type. Optionally start an upload. This command
is only supported by osbuild-composer. --size is in MiB.
compose types
List the supported output types.
compose status
List the status of all running and finished composes.
compose list [waiting|running|finished|failed]
List basic information about composes.
compose log <UUID> [<SIZE>]
Show the last SIZE kB of the compose log.
compose cancel <UUID>
Cancel a running compose and delete any intermediate results.
compose delete <UUID,...>
Delete the listed compose results.
compose info <UUID>
Show detailed information on the compose.
compose metadata <UUID>
    Download the metadata used to create the compose to <uuid>-metadata.tar
compose logs <UUID>
Download the compose logs to <uuid>-logs.tar
compose results <UUID>
Download all of the compose results; metadata, logs, and image to <uuid>.tar
compose image <UUID>
Download the output image from the compose. Filename depends on the type.
"""
blueprints_help = """
blueprints list
List the names of the available blueprints.
blueprints show <BLUEPRINT,...>
Display the blueprint in TOML format.
blueprints changes <BLUEPRINT,...>
Display the changes for each blueprint.
blueprints diff <BLUEPRINT> <FROM-COMMIT> <TO-COMMIT>
Display the differences between 2 versions of a blueprint.
FROM-COMMIT can be a commit hash or NEWEST
TO-COMMIT can be a commit hash, NEWEST, or WORKSPACE
blueprints save <BLUEPRINT,...>
Save the blueprint to a file, <BLUEPRINT>.toml
blueprints delete <BLUEPRINT>
Delete a blueprint from the server
blueprints depsolve <BLUEPRINT,...>
Display the packages needed to install the blueprint.
blueprints push <BLUEPRINT>
Push a blueprint TOML file to the server.
blueprints freeze <BLUEPRINT,...>
Display the frozen blueprint's modules and packages.
blueprints freeze show <BLUEPRINT,...>
Display the frozen blueprint in TOML format.
blueprints freeze save <BLUEPRINT,...>
Save the frozen blueprint to a file, <blueprint-name>.frozen.toml.
blueprints tag <BLUEPRINT>
Tag the most recent blueprint commit as a release.
blueprints undo <BLUEPRINT> <COMMIT>
Undo changes to a blueprint by reverting to the selected commit.
blueprints workspace <BLUEPRINT>
Push the blueprint TOML to the temporary workspace storage.
"""
modules_help = """
modules list
List the available modules.
"""
projects_help = """
projects list
List the available projects.
projects info <PROJECT,...>
Show details about the listed projects.
"""
sources_help = """
sources list
List the available sources
sources info <SOURCE-NAME,...>
Details about the source.
sources add <SOURCE.TOML>
Add a package source to the server.
sources change <SOURCE.TOML>
Change an existing source
sources delete <SOURCE-NAME>
Delete a package source.
"""
status_help = """
status show Show API server status.
"""
upload_help = """
upload info <UPLOAD-UUID>
Details about an upload
upload start <BUILD-UUID> <IMAGE-NAME> [<PROVIDER> <PROFILE>|<PROFILE.TOML>]
Upload a build image to the selected provider.
upload log <UPLOAD-UUID>
Show the upload log
upload cancel <UPLOAD-UUID>
    Cancel an upload that is queued or in progress
upload delete <UPLOAD-UUID>
Delete the upload and remove it from the build
upload reset <UPLOAD-UUID>
Reset the upload so that it can be tried again
"""
providers_help = """
providers list <PROVIDER>
List the available providers, or list the <provider's> available profiles
providers show <PROVIDER> <PROFILE>
    Show the details of a specific provider's profile
providers push <PROFILE.TOML>
Add a new profile, or overwrite an existing one
providers save <PROVIDER> <PROFILE>
Save the profile's details to a TOML file named <PROFILE>.toml
providers delete <PROVIDER> <PROFILE>
Delete a profile from a provider
"""
epilog = compose_help + blueprints_help + modules_help + projects_help \
+ sources_help + status_help + upload_help + providers_help

View File

@ -1,48 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
from composer import http_client as client
from composer.cli.help import modules_help
from composer.cli.utilities import handle_api_result
def modules_cmd(opts):
"""Process modules commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
"""
if opts.args[1] == "help" or opts.args[1] == "--help":
print(modules_help)
return 0
elif opts.args[1] != "list":
log.error("Unknown modules command: %s", opts.args[1])
return 1
api_route = client.api_url(opts.api_version, "/modules/list")
result = client.get_url_json_unlimited(opts.socket, api_route)
(rc, exit_now) = handle_api_result(result, opts.json)
if exit_now:
return rc
# "list" should output a plain list of identifiers, one per line.
print("\n".join(r["name"] for r in result["modules"]))
return rc

View File

@ -1,110 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import textwrap
from composer import http_client as client
from composer.cli.help import projects_help
from composer.cli.utilities import handle_api_result
def projects_cmd(opts):
"""Process projects commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
"""
cmd_map = {
"list": projects_list,
"info": projects_info,
}
if opts.args[1] == "help" or opts.args[1] == "--help":
print(projects_help)
return 0
elif opts.args[1] not in cmd_map:
log.error("Unknown projects command: %s", opts.args[1])
return 1
return cmd_map[opts.args[1]](opts.socket, opts.api_version, opts.args[2:], opts.json)
def projects_list(socket_path, api_version, args, show_json=False):
"""Output the list of available projects
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
projects list
"""
api_route = client.api_url(api_version, "/projects/list")
result = client.get_url_json_unlimited(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
for proj in result["projects"]:
for k in [field for field in ("name", "summary", "homepage", "description") if proj[field]]:
print("%s: %s" % (k.title(), textwrap.fill(proj[k], subsequent_indent=" " * (len(k)+2))))
print("\n\n")
return rc
def projects_info(socket_path, api_version, args, show_json=False):
"""Output info on a list of projects
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
projects info <project,...>
"""
if len(args) == 0:
log.error("projects info is missing the packages")
return 1
api_route = client.api_url(api_version, "/projects/info/%s" % ",".join(args))
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
for proj in result["projects"]:
for k in [field for field in ("name", "summary", "homepage", "description") if proj[field]]:
print("%s: %s" % (k.title(), textwrap.fill(proj[k], subsequent_indent=" " * (len(k)+2))))
print("Builds: ")
for build in proj["builds"]:
print(" %s%s-%s.%s at %s for %s" % ("" if not build["epoch"] else str(build["epoch"]) + ":",
build["source"]["version"],
build["release"],
build["arch"],
build["build_time"],
build["changelog"]))
print("")
return rc

View File

@ -1,323 +0,0 @@
#
# Copyright (C) 2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import json
import toml
import os
from composer import http_client as client
from composer.cli.help import providers_help
from composer.cli.utilities import handle_api_result, toml_filename
def providers_cmd(opts):
"""Process providers commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
This dispatches the providers commands to a function
"""
cmd_map = {
"list": providers_list,
"info": providers_info,
"show": providers_show,
"push": providers_push,
"save": providers_save,
"delete": providers_delete,
"template": providers_template
}
if opts.args[1] == "help" or opts.args[1] == "--help":
print(providers_help)
return 0
elif opts.args[1] not in cmd_map:
log.error("Unknown providers command: %s", opts.args[1])
return 1
return cmd_map[opts.args[1]](opts.socket, opts.api_version, opts.args[2:], opts.json, opts.testmode)
def providers_list(socket_path, api_version, args, show_json=False, testmode=0):
"""Return the list of providers
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
providers list
"""
api_route = client.api_url(api_version, "/upload/providers")
r = client.get_url_json(socket_path, api_route)
results = r["providers"]
if not results:
return 0
if show_json:
print(json.dumps(results, indent=4))
else:
if len(args) == 1:
if args[0] not in results:
log.error("%s is not a valid provider", args[0])
return 1
print("\n".join(sorted(results[args[0]]["profiles"].keys())))
else:
print("\n".join(sorted(results.keys())))
return 0
def providers_info(socket_path, api_version, args, show_json=False, testmode=0):
"""Show information about each provider
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
providers info <PROVIDER>
"""
if len(args) == 0:
log.error("info is missing the provider name")
return 1
api_route = client.api_url(api_version, "/upload/providers")
r = client.get_url_json(socket_path, api_route)
results = r["providers"]
if not results:
return 0
if show_json:
print(json.dumps(results, indent=4))
else:
if args[0] not in results:
log.error("%s is not a valid provider", args[0])
return 1
p = results[args[0]]
print("%s supports these image types: %s" % (p["display"], ", ".join(p["supported_types"])))
print("Settings:")
for k in p["settings-info"]:
f = p["settings-info"][k]
print(" %-20s: %s is a %s" % (k, f["display"], f["type"]))
return 0
def providers_show(socket_path, api_version, args, show_json=False, testmode=0):
"""Return details about a provider
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
providers show <provider> <profile>
"""
if len(args) == 0:
log.error("show is missing the provider name")
return 1
if len(args) == 1:
log.error("show is missing the profile name")
return 1
api_route = client.api_url(api_version, "/upload/providers")
r = client.get_url_json(socket_path, api_route)
results = r["providers"]
if not results:
return 0
if show_json:
print(json.dumps(results, indent=4))
else:
if args[0] not in results:
log.error("%s is not a valid provider", args[0])
return 1
if args[1] not in results[args[0]]["profiles"]:
log.error("%s is not a valid %s profile", args[1], args[0])
return 1
# Print the details for this profile
# fields are different for each provider, so we just print out the key:values
for k in results[args[0]]["profiles"][args[1]]:
print("%s: %s" % (k, results[args[0]]["profiles"][args[1]][k]))
return 0
def providers_push(socket_path, api_version, args, show_json=False, testmode=0):
"""Add a new provider profile or overwrite an existing one
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
providers push <profile.toml>
"""
if len(args) == 0:
log.error("push is missing the profile TOML file")
return 1
if not os.path.exists(args[0]):
log.error("Missing profile TOML file: %s", args[0])
return 1
api_route = client.api_url(api_version, "/upload/providers/save")
profile = toml.load(args[0])
result = client.post_url_json(socket_path, api_route, json.dumps(profile))
return handle_api_result(result, show_json)[0]
def providers_save(socket_path, api_version, args, show_json=False, testmode=0):
"""Save a provider's profile to a TOML file
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
providers save <provider> <profile>
"""
if len(args) == 0:
log.error("save is missing the provider name")
return 1
if len(args) == 1:
log.error("save is missing the profile name")
return 1
api_route = client.api_url(api_version, "/upload/providers")
r = client.get_url_json(socket_path, api_route)
results = r["providers"]
if not results:
return 0
if show_json:
print(json.dumps(results, indent=4))
else:
if args[0] not in results:
log.error("%s is not a valid provider", args[0])
return 1
if args[1] not in results[args[0]]["profiles"]:
log.error("%s is not a valid %s profile", args[1], args[0])
return 1
profile = {
"provider": args[0],
"profile": args[1],
"settings": results[args[0]]["profiles"][args[1]]
}
with open(toml_filename(args[1]), "w") as f:
f.write(toml.dumps(profile))
return 0
def providers_delete(socket_path, api_version, args, show_json=False, testmode=0):
"""Delete a profile from a provider
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
providers delete <provider> <profile>
"""
if len(args) == 0:
log.error("delete is missing the provider name")
return 1
if len(args) == 1:
log.error("delete is missing the profile name")
return 1
api_route = client.api_url(api_version, "/upload/providers/delete/%s/%s" % (args[0], args[1]))
result = client.delete_url_json(socket_path, api_route)
return handle_api_result(result, show_json)[0]
def providers_template(socket_path, api_version, args, show_json=False, testmode=0):
"""Return a TOML template for setting the provider's fields
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
providers template <provider>
"""
if len(args) == 0:
log.error("template is missing the provider name")
return 1
api_route = client.api_url(api_version, "/upload/providers")
r = client.get_url_json(socket_path, api_route)
results = r["providers"]
if not results:
return 0
if show_json:
print(json.dumps(results, indent=4))
return 0
if args[0] not in results:
log.error("%s is not a valid provider", args[0])
return 1
template = {"provider": args[0]}
settings = results[args[0]]["settings-info"]
template["settings"] = dict([(k, settings[k]["display"]) for k in settings])
print(toml.dumps(template))
return 0
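# Illustrative example (hypothetical provider and settings, not part of the original
# file): for a provider "example" whose settings-info exposes a single "region" field
# with display text "Region name", providers_template would print roughly:
#
#   provider = "example"
#
#   [settings]
#   region = "Region name"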

View File

@ -1,153 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import os
from composer import http_client as client
from composer.cli.help import sources_help
from composer.cli.utilities import argify, handle_api_result
def sources_cmd(opts):
"""Process sources commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
"""
cmd_map = {
"list": sources_list,
"info": sources_info,
"add": sources_add,
"change": sources_add,
"delete": sources_delete,
}
if opts.args[1] == "help" or opts.args[1] == "--help":
print(sources_help)
return 0
elif opts.args[1] not in cmd_map:
log.error("Unknown sources command: %s", opts.args[1])
return 1
return cmd_map[opts.args[1]](opts.socket, opts.api_version, opts.args[2:], opts.json)
def sources_list(socket_path, api_version, args, show_json=False):
"""Output the list of available sources
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
sources list
"""
api_route = client.api_url(api_version, "/projects/source/list")
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
# "list" should output a plain list of identifiers, one per line.
print("\n".join(result["sources"]))
return rc
def sources_info(socket_path, api_version, args, show_json=False):
"""Output info on a list of projects
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
sources info <source-name>
"""
if len(args) == 0:
log.error("sources info is missing the name of the source")
return 1
if show_json:
api_route = client.api_url(api_version, "/projects/source/info/%s" % ",".join(args))
result = client.get_url_json(socket_path, api_route)
rc = handle_api_result(result, show_json)[0]
else:
api_route = client.api_url(api_version, "/projects/source/info/%s?format=toml" % ",".join(args))
try:
result = client.get_url_raw(socket_path, api_route)
print(result)
rc = 0
except RuntimeError as e:
print(str(e))
rc = 1
return rc
def sources_add(socket_path, api_version, args, show_json=False):
"""Add or change a source
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
sources add <source.toml>
"""
api_route = client.api_url(api_version, "/projects/source/new")
rval = 0
for source in argify(args):
if not os.path.exists(source):
log.error("Missing source file: %s", source)
continue
with open(source, "r") as f:
source_toml = f.read()
result = client.post_url_toml(socket_path, api_route, source_toml)
if handle_api_result(result, show_json)[0]:
rval = 1
return rval
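# Illustrative example (hypothetical values, not part of the original file): a source
# TOML file passed to "sources add" or "sources change" typically looks something like:
#
#   name = "custom-repo-1"
#   type = "yum-baseurl"
#   url = "https://repo.example.com/custom/"
#   check_ssl = true
#   check_gpg = false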
def sources_delete(socket_path, api_version, args, show_json=False):
"""Delete a source
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
sources delete <source-name>
"""
api_route = client.api_url(api_version, "/projects/source/delete/%s" % args[0])
result = client.delete_url_json(socket_path, api_route)
return handle_api_result(result, show_json)[0]

View File

@ -1,56 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
from composer import http_client as client
from composer.cli.help import status_help
from composer.cli.utilities import handle_api_result
def status_cmd(opts):
"""Process status commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
"""
if opts.args[1] == "help" or opts.args[1] == "--help":
print(status_help)
return 0
elif opts.args[1] != "show":
log.error("Unknown status command: %s", opts.args[1])
return 1
result = client.get_url_json(opts.socket, "/api/status")
(rc, exit_now) = handle_api_result(result, opts.json)
if exit_now:
return rc
print("API server status:")
print(" Database version: " + result["db_version"])
print(" Database supported: %s" % result["db_supported"])
print(" Schema version: " + result["schema_version"])
print(" API version: " + result["api"])
print(" Backend: " + result["backend"])
print(" Build: " + result["build"])
if result["msgs"]:
print("Error messages:")
print("\n".join([" " + r for r in result["msgs"]]))
return rc

View File

@ -1,277 +0,0 @@
#
# Copyright (C) 2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import json
import toml
import os
from composer import http_client as client
from composer.cli.help import upload_help
from composer.cli.utilities import handle_api_result
def upload_cmd(opts):
"""Process upload commands
:param opts: Cmdline arguments
:type opts: argparse.Namespace
:returns: Value to return from sys.exit()
:rtype: int
This dispatches the upload commands to a function
"""
cmd_map = {
"list": upload_list,
"info": upload_info,
"start": upload_start,
"log": upload_log,
"cancel": upload_cancel,
"delete": upload_delete,
"reset": upload_reset,
}
if opts.args[1] == "help" or opts.args[1] == "--help":
print(upload_help)
return 0
elif opts.args[1] not in cmd_map:
log.error("Unknown upload command: %s", opts.args[1])
return 1
return cmd_map[opts.args[1]](opts.socket, opts.api_version, opts.args[2:], opts.json, opts.testmode)
def upload_list(socket_path, api_version, args, show_json=False, testmode=0):
"""Return the composes and their associated upload uuids and status
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
upload list
"""
api_route = client.api_url(api_version, "/compose/finished")
r = client.get_url_json(socket_path, api_route)
results = r["finished"]
if not results:
return 0
if show_json:
print(json.dumps(results, indent=4))
else:
compose_fmt = "{id} {queue_status} {blueprint} {version} {compose_type}"
upload_fmt = ' {uuid} "{image_name}" {provider_name} {status}'
for c in results:
print(compose_fmt.format(**c))
print("\n".join(upload_fmt.format(**u) for u in c["uploads"]))
print()
return 0
def upload_info(socket_path, api_version, args, show_json=False, testmode=0):
"""Return detailed information about the upload
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
upload info <uuid>
This returns information about the upload, including uuid, name, status, service, and image.
"""
if len(args) == 0:
log.error("info is missing the upload uuid")
return 1
api_route = client.api_url(api_version, "/upload/info/%s" % args[0])
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
image_path = result["upload"]["image_path"]
print("%s %-8s %-15s %-8s %s" % (result["upload"]["uuid"],
result["upload"]["status"],
result["upload"]["image_name"],
result["upload"]["provider_name"],
os.path.basename(image_path) if image_path else "UNFINISHED"))
return rc
def upload_start(socket_path, api_version, args, show_json=False, testmode=0):
"""Start upload up a build uuid image
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
upload start <build-uuid> <image-name> [<provider> <profile> | <profile.toml>]
"""
if len(args) == 0:
log.error("start is missing the compose build id")
return 1
if len(args) == 1:
log.error("start is missing the image name")
return 1
if len(args) == 2:
log.error("start is missing the provider and profile details")
return 1
body = {"image_name": args[1]}
if len(args) == 3:
try:
body.update(toml.load(args[2]))
except toml.TomlDecodeError as e:
log.error(str(e))
return 1
elif len(args) == 4:
body["provider"] = args[2]
body["profile"] = args[3]
else:
log.error("start has incorrect number of arguments")
return 1
api_route = client.api_url(api_version, "/compose/uploads/schedule/%s" % args[0])
result = client.post_url_json(socket_path, api_route, json.dumps(body))
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
print("Upload %s added to the queue" % result["upload_id"])
return rc
def upload_log(socket_path, api_version, args, show_json=False, testmode=0):
"""Return the upload log
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
upload log <build-uuid>
"""
if len(args) == 0:
log.error("log is missing the upload uuid")
return 1
api_route = client.api_url(api_version, "/upload/log/%s" % args[0])
result = client.get_url_json(socket_path, api_route)
(rc, exit_now) = handle_api_result(result, show_json)
if exit_now:
return rc
print("Upload log for %s:\n" % result["upload_id"])
print(result["log"])
return 0
def upload_cancel(socket_path, api_version, args, show_json=False, testmode=0):
"""Cancel the queued or running upload
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
upload cancel <build-uuid>
"""
if len(args) == 0:
log.error("cancel is missing the upload uuid")
return 1
api_route = client.api_url(api_version, "/upload/cancel/%s" % args[0])
result = client.delete_url_json(socket_path, api_route)
return handle_api_result(result, show_json)[0]
def upload_delete(socket_path, api_version, args, show_json=False, testmode=0):
"""Delete an upload and remove it from the build
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
upload delete <build-uuid>
"""
if len(args) == 0:
log.error("delete is missing the upload uuid")
return 1
api_route = client.api_url(api_version, "/upload/delete/%s" % args[0])
result = client.delete_url_json(socket_path, api_route)
return handle_api_result(result, show_json)[0]
def upload_reset(socket_path, api_version, args, show_json=False, testmode=0):
"""Reset the upload and execute it again
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param api_version: Version of the API to talk to. eg. "0"
:type api_version: str
:param args: List of remaining arguments from the cmdline
:type args: list of str
:param show_json: Set to True to show the JSON output instead of the human readable output
:type show_json: bool
:param testmode: unused in this function
:type testmode: int
upload reset <build-uuid>
"""
if len(args) == 0:
log.error("reset is missing the upload uuid")
return 1
api_route = client.api_url(api_version, "/upload/reset/%s" % args[0])
result = client.post_url_json(socket_path, api_route, json.dumps({}))
return handle_api_result(result, show_json)[0]
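As a quick reference, the request body that "upload start" assembles before POSTing it to the uploads/schedule route, condensed into a standalone sketch; the image name, provider, and profile values in the trailing comment are made-up examples, not values from this repository:
import json
import toml

def build_upload_body(args):
    # args mirrors upload_start: [build-uuid, image-name, profile.toml]
    # or [build-uuid, image-name, provider, profile]
    body = {"image_name": args[1]}
    if len(args) == 3:
        body.update(toml.load(args[2]))   # a profile TOML carries provider and settings
    elif len(args) == 4:
        body["provider"] = args[2]
        body["profile"] = args[3]
    return json.dumps(body)

# build_upload_body(["<uuid>", "my-image", "aws", "production"]) produces
# {"image_name": "my-image", "provider": "aws", "profile": "production"}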

View File

@@ -1,123 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import json
def argify(args):
"""Take a list of human args and return a list with each item
:param args: list of strings with possible commas and spaces
:type args: list of str
:returns: List of all the items
:rtype: list of str
Examples:
["one,two", "three", ",four", ",five,"] returns ["one", "two", "three", "four", "five"]
"""
return [i for i in [arg for entry in args for arg in entry.split(",")] if i]
def toml_filename(blueprint_name):
"""Convert a blueprint name into a filename.toml
:param blueprint_name: The blueprint's name
:type blueprint_name: str
:returns: The blueprint name with ' ' converted to - and .toml appended
:rtype: str
"""
return blueprint_name.replace(" ", "-") + ".toml"
def frozen_toml_filename(blueprint_name):
"""Convert a blueprint name into a filename.toml
:param blueprint_name: The blueprint's name
:type blueprint_name: str
:returns: The blueprint name with ' ' converted to - and .toml appended
:rtype: str
"""
return blueprint_name.replace(" ", "-") + ".frozen.toml"
def handle_api_result(result, show_json=False):
"""Log any errors, return the correct value
:param result: JSON result from the http query
:type result: dict
:rtype: tuple
:returns: (rc, should_exit_now)
Return the correct rc for the program (0 or 1), and whether or
not to continue processing the results.
"""
if show_json:
print(json.dumps(result, indent=4))
else:
for err in result.get("errors", []):
log.error(err["msg"])
# What's the rc? If status is present, use that
# If not, use length of errors
if "status" in result:
rc = int(not result["status"])
else:
rc = int(len(result.get("errors", [])) > 0)
# Caller should return if showing json, or status was present and False
exit_now = show_json or ("status" in result and rc)
return (rc, exit_now)
def packageNEVRA(pkg):
"""Return the package info as a NEVRA
:param pkg: The package details
:type pkg: dict
:returns: name-[epoch:]version-release-arch
:rtype: str
"""
if pkg["epoch"]:
return "%s-%s:%s-%s.%s" % (pkg["name"], pkg["epoch"], pkg["version"], pkg["release"], pkg["arch"])
else:
return "%s-%s-%s.%s" % (pkg["name"], pkg["version"], pkg["release"], pkg["arch"])
def get_arg(args, name, argtype=None):
"""Return optional value from args, and remaining args
:param args: list of arguments
:type args: list of strings
:param name: The argument to remove from the args list
:type name: string
:param argtype: Type to use for checking the argument value
:type argtype: type
:returns: (args, value)
:rtype: tuple
This removes the optional argument and value from the argument list, returns the new list,
and the value of the argument.
"""
try:
idx = args.index(name)
if len(args) < idx+2:
raise RuntimeError(f"{name} is missing the value")
value = args[idx+1]
except ValueError:
return (args, None)
if argtype:
value = argtype(value)
return (args[:idx]+args[idx+2:], value)
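A condensed sketch of how these helpers fit together, assuming composer.cli.utilities is still importable (this commit removes it); the --size flag and the package names are made-up examples:
from composer.cli.utilities import argify, get_arg, handle_api_result

# get_arg strips an optional flag and its typed value from the remaining args
args, size = get_arg(["--size", "4096", "httpd,php", "openssh-server"], "--size", int)
# args == ["httpd,php", "openssh-server"] and size == 4096; a missing flag returns (args, None)

# argify flattens comma/space separated entries into individual items
print(argify(args))                       # ["httpd", "php", "openssh-server"]

# handle_api_result turns a weldr-style JSON response into (rc, exit_now)
rc, exit_now = handle_api_result({"status": False, "errors": [{"msg": "example error"}]})
# rc == 1 and exit_now is truthy because status is present and False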

View File

@@ -1,260 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
import os
import sys
import json
from urllib.parse import urlparse, urlunparse
from composer.unix_socket import UnixHTTPConnectionPool
def api_url(api_version, url):
"""Return the versioned path to the API route
:param api_version: The version of the API to talk to. eg. "0"
:type api_version: str
:param url: The API route to talk to
:type url: str
:returns: The full url to use for the route and API version
:rtype: str
"""
return os.path.normpath("/api/v%s/%s" % (api_version, url))
def append_query(url, query):
"""Add a query argument to a URL
The query should be of the form "param1=what&param2=ever", i.e., no
leading '?'. The new query data will be appended to any existing
query string.
:param url: The original URL
:type url: str
:param query: The query to append
:type query: str
:returns: The new URL with the query argument included
:rtype: str
"""
url_parts = urlparse(url)
if url_parts.query:
new_query = url_parts.query + "&" + query
else:
new_query = query
return urlunparse([url_parts[0], url_parts[1], url_parts[2],
url_parts[3], new_query, url_parts[5]])
def get_url_raw(socket_path, url):
"""Return the raw results of a GET request
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to request
:type url: str
:returns: The raw response from the server
:rtype: str
"""
http = UnixHTTPConnectionPool(socket_path)
r = http.request("GET", url)
if r.status == 400:
err = json.loads(r.data.decode("utf-8"))
if "status" in err and err["status"] == False:
msgs = [e["msg"] for e in err["errors"]]
raise RuntimeError(", ".join(msgs))
return r.data.decode('utf-8')
def get_url_json(socket_path, url):
"""Return the JSON results of a GET request
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to request
:type url: str
:returns: The json response from the server
:rtype: dict
"""
http = UnixHTTPConnectionPool(socket_path)
r = http.request("GET", url)
return json.loads(r.data.decode('utf-8'))
def get_url_json_unlimited(socket_path, url, total_fn=None):
"""Return the JSON results of a GET request
For URLs that use offset/limit arguments, this command will
fetch all results for the given request.
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to request
:type url: str
:returns: The json response from the server
:rtype: dict
"""
def default_total_fn(data):
"""Return the total number of available results"""
return data["total"]
http = UnixHTTPConnectionPool(socket_path)
# Start with limit=0 to just get the number of objects
total_url = append_query(url, "limit=0")
r_total = http.request("GET", total_url)
json_total = json.loads(r_total.data.decode('utf-8'))
# Where to get the total from
if not total_fn:
total_fn = default_total_fn
# Add the "total" returned by limit=0 as the new limit
unlimited_url = append_query(url, "limit=%d" % total_fn(json_total))
r_unlimited = http.request("GET", unlimited_url)
return json.loads(r_unlimited.data.decode('utf-8'))
def delete_url_json(socket_path, url):
"""Send a DELETE request to the url and return JSON response
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to send DELETE to
:type url: str
:returns: The json response from the server
:rtype: dict
"""
http = UnixHTTPConnectionPool(socket_path)
r = http.request("DELETE", url)
return json.loads(r.data.decode("utf-8"))
def post_url(socket_path, url, body):
"""POST raw data to the URL
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to send POST to
:type url: str
:param body: The data for the body of the POST
:type body: str
:returns: The json response from the server
:rtype: dict
"""
http = UnixHTTPConnectionPool(socket_path)
r = http.request("POST", url,
body=body.encode("utf-8"))
return json.loads(r.data.decode("utf-8"))
def post_url_toml(socket_path, url, body):
"""POST a TOML string to the URL
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to send POST to
:type url: str
:param body: The data for the body of the POST
:type body: str
:returns: The json response from the server
:rtype: dict
"""
http = UnixHTTPConnectionPool(socket_path)
r = http.request("POST", url,
body=body.encode("utf-8"),
headers={"Content-Type": "text/x-toml"})
return json.loads(r.data.decode("utf-8"))
def post_url_json(socket_path, url, body):
"""POST some JSON data to the URL
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to send POST to
:type url: str
:param body: The data for the body of the POST
:type body: str
:returns: The json response from the server
:rtype: dict
"""
http = UnixHTTPConnectionPool(socket_path)
r = http.request("POST", url,
body=body.encode("utf-8"),
headers={"Content-Type": "application/json"})
return json.loads(r.data.decode("utf-8"))
def get_filename(headers):
"""Get the filename from the response header
:param headers: The response headers from the server
:type headers: dict
:raises: RuntimeError if it cannot find a filename in the header
:returns: Filename from content-disposition header
:rtype: str
"""
log.debug("Headers = %s", headers)
if "content-disposition" not in headers:
raise RuntimeError("No Content-Disposition header; cannot get filename")
try:
k, _, v = headers["content-disposition"].split(";")[1].strip().partition("=")
if k != "filename":
raise RuntimeError("No filename= found in content-disposition header")
except RuntimeError:
raise
except Exception as e:
raise RuntimeError("Error parsing filename from content-disposition header: %s" % str(e))
return os.path.basename(v)
def download_file(socket_path, url, progress=True):
"""Download a file, saving it to the CWD with the included filename
:param socket_path: Path to the Unix socket to use for API communication
:type socket_path: str
:param url: URL to request
:type url: str
:param progress: Print download progress to stdout while saving the file
:type progress: bool
"""
http = UnixHTTPConnectionPool(socket_path)
r = http.request("GET", url, preload_content=False)
if r.status == 400:
err = json.loads(r.data.decode("utf-8"))
if not err["status"]:
msgs = [e["msg"] for e in err["errors"]]
raise RuntimeError(", ".join(msgs))
filename = get_filename(r.headers)
if os.path.exists(filename):
msg = "%s exists, skipping download" % filename
log.error(msg)
raise RuntimeError(msg)
with open(filename, "wb") as f:
while True:
data = r.read(10 * 1024**2)
if not data:
break
f.write(data)
if progress:
data_written = f.tell()
if data_written > 5 * 1024**2:
sys.stdout.write("%s: %0.2f MB \r" % (filename, data_written / 1024**2))
else:
sys.stdout.write("%s: %0.2f kB\r" % (filename, data_written / 1024))
sys.stdout.flush()
print("")
r.release_conn()
return 0
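Two of the helpers above are easier to follow with concrete values; a hedged sketch, where the blueprint route, branch argument, and default weldr socket path are ordinary examples rather than anything specific to this commit:
from composer.http_client import api_url, append_query, get_url_json_unlimited

route = api_url("0", "/blueprints/list")                  # "/api/v0/blueprints/list"
print(append_query(route + "?branch=master", "limit=0"))  # "/api/v0/blueprints/list?branch=master&limit=0"

# get_url_json_unlimited first asks for limit=0, which only returns the total count,
# then repeats the request with limit=<total> so no results are left behind a page
all_blueprints = get_url_json_unlimited("/run/weldr/api.socket", route)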

View File

@@ -1,63 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import http.client
import socket
import urllib3
# These 2 classes were adapted and simplified for use with just urllib3.
# Originally from https://github.com/msabramo/requests-unixsocket/blob/master/requests_unixsocket/adapters.py
# The following was adapted from some code from docker-py
# https://github.com/docker/docker-py/blob/master/docker/transport/unixconn.py
class UnixHTTPConnection(http.client.HTTPConnection, object):
def __init__(self, socket_path, timeout=60*5):
"""Create an HTTP connection to a unix domain socket
:param socket_path: The path to the Unix domain socket
:param timeout: Number of seconds to timeout the connection
"""
super(UnixHTTPConnection, self).__init__('localhost', timeout=timeout)
self.socket_path = socket_path
self.sock = None
def __del__(self): # base class does not have d'tor
if self.sock:
self.sock.close()
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.timeout)
sock.connect(self.socket_path)
self.sock = sock
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, socket_path, timeout=60*5):
"""Create a connection pool using a Unix domain socket
:param socket_path: The path to the Unix domain socket
:param timeout: Number of seconds to timeout the connection
NOTE: retries are disabled for these connections, they are never useful
"""
super(UnixHTTPConnectionPool, self).__init__('localhost', timeout=timeout, retries=False)
self.socket_path = socket_path
def _new_conn(self):
return UnixHTTPConnection(self.socket_path, self.timeout)
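A minimal sketch of driving the pool directly, which is all http_client does underneath; it assumes the default weldr socket exists and a server is listening on it:
import json
from composer.unix_socket import UnixHTTPConnectionPool

http = UnixHTTPConnectionPool("/run/weldr/api.socket")
r = http.request("GET", "/api/status")
print(json.loads(r.data.decode("utf-8")))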

View File

@@ -320,7 +320,7 @@ def anaconda_cleanup(dirinstall_path):
# Make sure the process is really finished (it should be, since it was started from a subprocess call)
# and then remove the pid file.
if os.path.exists("/var/run/anaconda.pid"):
# lorax-composer runs anaconda using unshare so the pid is always 1
# anaconda may be started using unshare so the pid is always 1
if open("/var/run/anaconda.pid").read().strip() == "1":
os.unlink("/var/run/anaconda.pid")

View File

@@ -15,16 +15,12 @@ python3-pocketlint
python3-psutil
python3-pycdlib
python3-pylint
python3-pyparted
python3-pytest
python3-pytest-cov
python3-pyvmomi
python3-rpmfluff
python3-semantic_version
python3-sphinx
python3-sphinx-argparse
python3-sphinx_rtd_theme
python3-toml
qemu-img
rsync
squashfs-tools

View File

@@ -1,24 +0,0 @@
#!/usr/bin/python3
import composertest
class TestImages(composertest.ComposerTestCase):
"""
This is the "entry-point" to the test suite when
executed in Cockpit CI. If $TEST_SCENARIO=="" or
$TEST_SCENARIO="images" we end up here.
New test methods should be added here first!
When this target becomes too slow we split them out into
separate scenarios!
"""
def test_blueprint_sanity(self):
self.runCliTest("/tests/cli/test_blueprints_sanity.sh")
def test_compose_sanity(self):
self.runCliTest("/tests/cli/test_compose_sanity.sh")
if __name__ == '__main__':
composertest.main()

View File

@@ -2,10 +2,10 @@
import tempfile
import composertest
import loraxtest
class LoraxTestCase(composertest.ComposerTestCase):
class LoraxTestCase(loraxtest.TestCase):
def setUp(self):
self.setUpTestMachine()
@@ -54,4 +54,4 @@ class TestLorax(LoraxTestCase):
if __name__ == '__main__':
composertest.main()
loraxtest.main()

View File

@@ -120,7 +120,7 @@ class VirtMachineTestCase(unittest.TestCase):
self.boot_id = boot_id
class ComposerTestCase(VirtMachineTestCase):
class TestCase(VirtMachineTestCase):
def setUp(self):
self.setUpTestMachine()
@@ -148,20 +148,8 @@ class ComposerTestCase(VirtMachineTestCase):
self.tearDownTestMachine()
return local_dir
def runCliTest(self, script):
extra_env = ["BACKEND=%s" % os.getenv('BACKEND', 'osbuild-composer')]
if self.sit:
extra_env.append("COMPOSER_TEST_FAIL_FAST=1")
r = self.execute(["CLI=/usr/bin/composer-cli",
"TEST=" + self.id(),
"PACKAGE=composer-cli",
*extra_env,
"/tests/test_cli.sh", script])
self.assertEqual(r.returncode, 0)
class ComposerTestResult(unittest.TestResult):
class TestResult(unittest.TestResult):
def name(self, test):
name = test.id().replace("__main__.", "")
if test.shortDescription():
@@ -205,8 +193,8 @@ class ComposerTestResult(unittest.TestResult):
print("not ok {} {}".format(self.testsRun, self.name(test)))
class ComposerTestRunner(object):
"""A test runner that (in combination with ComposerTestResult) outputs
class TestRunner(object):
"""A test runner that (in combination with TestResult) outputs
results in a way that cockpit's log.html can read and format them.
"""
@@ -214,7 +202,7 @@ class ComposerTestRunner(object):
self.failfast = failfast
def run(self, testable):
result = ComposerTestResult()
result = TestResult()
result.failfast = self.failfast
result.startTestRun()
count = testable.countTestCases()
@@ -244,7 +232,7 @@ def main():
parser.add_argument("-s", "--sit", action="store_true", help="Halt test execution (but keep VM running) when a test fails")
args = parser.parse_args()
ComposerTestCase.sit = args.sit
TestCase.sit = args.sit
module = __import__("__main__")
if args.tests:
@@ -256,7 +244,7 @@
print_tests(tests)
return 0
runner = ComposerTestRunner(failfast=args.sit)
runner = TestRunner(failfast=args.sit)
result = runner.run(tests)
if tests.countTestCases() != result.testsRun:

View File

@@ -2,16 +2,9 @@
# This is the expected entry point for Cockpit CI; will be called without
# arguments but with an appropriate $TEST_OS, and optionally $TEST_SCENARIO
if [ "$TEST_SCENARIO" == "osbuild-composer" ]; then
rm -rf ./test/images/*
export BACKEND="osbuild-composer"
make BACKEND=osbuild-composer vm
else
if [ "$TEST_SCENARIO" != "osbuild-composer" ]; then
echo "$TEST_SCENARIO no longer supported by lorax"
exit 1
fi
make vm
fi
if [ "$TEST_SCENARIO" == "lorax" ]; then
test/check-lorax TestLorax
else
test/check-cli TestImages
fi

View File

@@ -1,11 +1,10 @@
#!/bin/sh -eux
BACKEND="${BACKEND:-lorax-composer}"
SRPM="$1"
# always remove older versions of these RPMs if they exist
# to ensure newly built packages have been installed
yum -y remove lorax $BACKEND composer-cli
yum -y remove lorax
if ! rpm -q beakerlib; then
if [ $(. /etc/os-release && echo $ID) = "rhel" ]; then
@@ -45,12 +44,7 @@ rm -rf build-results
su builder -c "/usr/bin/mock --verbose --no-clean --resultdir build-results --rebuild $SRPM"
packages=$(find build-results -name '*.rpm' -not -name '*.src.rpm')
if [ "$BACKEND" == "osbuild-composer" ]; then
packages=$(find build-results -name '*.rpm' -not -name '*.src.rpm' -not -name '*lorax-composer*')
fi
yum install -y $packages $BACKEND
systemctl enable $BACKEND.socket
yum install -y $packages
if [ -f /usr/bin/docker ]; then
yum remove -y $(rpm -qf /usr/bin/docker)

View File

@@ -1,217 +0,0 @@
#!/usr/bin/env bash
. /usr/share/beakerlib/beakerlib.sh
BACKEND="${BACKEND:-lorax-composer}"
export BACKEND
# Monkey-patch beakerlib to exit on first failure if COMPOSER_TEST_FAIL_FAST is
# set. https://github.com/beakerlib/beakerlib/issues/42
COMPOSER_TEST_FAIL_FAST=${COMPOSER_TEST_FAIL_FAST:-0}
if [ "$COMPOSER_TEST_FAIL_FAST" == "1" ]; then
eval "original$(declare -f __INTERNAL_LogAndJournalFail)"
__INTERNAL_LogAndJournalFail () {
original__INTERNAL_LogAndJournalFail
# end test somewhat cleanly so that beakerlib logs the FAIL correctly
rlPhaseEnd
rlJournalEnd
exit 1
}
fi
setup_beakerlib_env() {
export BEAKERLIB_DIR=$(mktemp -d /tmp/composer-test.XXXXXX)
export BEAKERLIB_JOURNAL=0
}
run_beakerlib_tests() {
if [ -z "$*" ]; then
echo "run_beakerlib_tests() requires a test to execute"
else
# execute tests
for TEST in "$@"; do
$TEST
done
fi
}
parse_beakerlib_results() {
if [ ! -f "$BEAKERLIB_DIR/TestResults" ]; then
exit "$BEAKERLIB_DIR/TestResults not found" 1
fi
. $BEAKERLIB_DIR/TestResults
TESTRESULT_RESULT_ECODE="${TESTRESULT_RESULT_ECODE:-}"
if [ "$TESTRESULT_RESULT_ECODE" != 0 ]; then
echo "Test failed. Leaving log in $BEAKERLIB_DIR"
exit $TESTRESULT_RESULT_ECODE
fi
rm -rf $BEAKERLIB_DIR
}
export QEMU_BIN="/usr/bin/qemu-system-$(uname -m)"
export QEMU="$QEMU_BIN -machine accel=kvm:tcg"
export SSH_PORT=2222
boot_image() {
QEMU_BOOT=$1
TIMEOUT=$2
rlRun -t -c "$QEMU -m 2048 $QEMU_BOOT -nographic -monitor none \
-net user,id=nic0,hostfwd=tcp::$SSH_PORT-:22 -net nic \
-chardev null,id=log0,mux=on,logfile=/var/log$TEST/qemu.log,logappend=on \
-serial chardev:log0 &"
# wait for ssh to become ready (yes, http is the wrong protocol, but it returns the header)
tries=0
until curl --http0.9 -sS -m 15 "http://localhost:$SSH_PORT/" | grep 'OpenSSH'; do
tries=$((tries + 1))
if [ $tries -gt $TIMEOUT ]; then
exit 1
fi
sleep 1
echo "DEBUG: Waiting for ssh become ready before testing ..."
done;
}
wait_for_composer() {
tries=0
until curl -m 15 --unix-socket /run/weldr/api.socket http://localhost:4000/api/status | grep 'db_supported.*true'; do
tries=$((tries + 1))
if [ $tries -gt 50 ]; then
exit 1
fi
sleep 5
echo "DEBUG: Waiting for backend API to become ready before testing ..."
done;
}
composer_start() {
local rc
local params="$@"
if [ "$BACKEND" == "lorax-composer" ] && [[ -z "$CLI" || "$CLI" == "./src/bin/composer-cli" ]]; then
./src/sbin/lorax-composer $params --sharedir $SHARE_DIR $BLUEPRINTS_DIR &
elif [ "$BACKEND" == "lorax-composer" ] && [ -n "$params" ]; then
/usr/sbin/lorax-composer $params /var/lib/lorax/composer/blueprints &
else
# socket stop/start seems to be necessary for a proper service restart
# after a previous direct manual run for it to work properly
systemctl start $BACKEND.socket
systemctl start $BACKEND
fi
rc=$?
# wait for the backend to become ready
if [ "$rc" -eq 0 ]; then
wait_for_composer
else
rlLogFail "Unable to start $BACKEND (exit code $rc)"
fi
return $rc
}
composer_stop() {
MANUAL=${MANUAL:-0}
# socket stop/start seems to be necessary for a proper service restart
# after a previous direct manual run for it to work properly
if systemctl list-units | grep -q $BACKEND.socket; then
systemctl stop $BACKEND.socket
fi
if [[ -z "$CLI" || "$CLI" == "./src/bin/composer-cli" || "$MANUAL" == "1" ]]; then
pkill -9 lorax-composer
rm -f /run/weldr/api.socket
else
systemctl stop $BACKEND
fi
}
# a generic helper function unifying the specific checks executed on a running
# image instance
verify_image() {
SSH_USER="$1"
SSH_MACHINE="$2"
SSH_OPTS="-o StrictHostKeyChecking=no -o BatchMode=yes $3"
rlLogInfo "verify_image: SSH_OPTS:'$SSH_OPTS' SSH_USER:'$SSH_USER' SSH_MACHINE: '$SSH_MACHINE'"
check_root_account "$@"
if [ "$CHECK_CMDLINE" != 0 ]; then
check_kernel_cmdline "$@"
fi
}
check_root_account() {
# Try to SSH to a remote machine first using root account using password-based
# auth (this is expected to fail) and then using key-based auth with the
# supplied username to check content of /etc/shadow and audit.log.
#
# use: check_root_account <user> <machine> [ssh options]
ROOT_ACCOUNT_LOCKED=${ROOT_ACCOUNT_LOCKED:-1}
if [[ "$SSH_USER" == "" || "$SSH_MACHINE" == "" ]]; then
rlFail "check_root_account: Missing user or machine parameter."
return 1
fi
# If you are connected as root you do not need sudo
if [[ "$SSH_USER" == "root" ]]; then
SUDO=""
else
SUDO="sudo"
fi
if [ $ROOT_ACCOUNT_LOCKED == 0 ]; then
rlRun -t -c "ssh $SSH_OPTS ${SSH_USER}@${SSH_MACHINE} \"$SUDO passwd --status root | grep -E '^root\s+NP?'\"" \
0 "Password for root account in /etc/shadow is empty"
else
# ssh returns 255 in case of any ssh error, so it's better to grep the specific error message
rlRun -t -c "ssh $SSH_OPTS -o PubkeyAuthentication=no root@${SSH_MACHINE} 2>&1 | grep -i 'permission denied ('" \
0 "Can't ssh to '$SSH_MACHINE' as root using password-based auth"
rlRun -t -c "ssh $SSH_OPTS ${SSH_USER}@${SSH_MACHINE} \"$SUDO passwd --status root | grep -E '^root\s+LK?'\"" \
0 "root account is disabled in /etc/shadow"
rlRun -t -c "ssh $SSH_OPTS ${SSH_USER}@${SSH_MACHINE} \"$SUDO journalctl -g 'USER_LOGIN.*acct=\\\"root\\\".*terminal=ssh.*res=failed'\"" \
0 "audit.log contains entry about unsuccessful root login"
# We modify the default sshd settings on live ISO, so we can only check the default empty password setting
# outside of live ISO
rlRun -t -c "ssh $SSH_OPTS ${SSH_USER}@${SSH_MACHINE} '$SUDO grep -E \"^[[:blank:]]*PermitEmptyPasswords[[:blank:]]*yes\" /etc/ssh/sshd_config'" 1 \
"Login with empty passwords is disabled in sshd config file"
fi
rlRun -t -c "ssh $SSH_OPTS ${SSH_USER}@${SSH_MACHINE} 'cat /etc/redhat-release'"
}
# verify that a kernel command line argument was passed from the blueprint (this is added to the blueprint in ../test_cli.sh)
check_kernel_cmdline() {
rlRun -t -c "ssh $SSH_OPTS ${SSH_USER}@${SSH_MACHINE} 'grep custom_cmdline_arg /proc/cmdline'" 0 \
"System booted from the image contains specified parameter on kernel command line"
}
# Fail if the compose failed, only call after checking for FINISHED|FAILED
check_compose_status() {
UUID="$1"
if "$CLI" compose info "$UUID" | grep FAILED; then
rlFail "compose $UUID FAILED"
return 1
fi
}
# Wait until the compose is done (finished or failed)
wait_for_compose() {
local UUID=$1
if [ -n "$UUID" ]; then
until $CLI compose info $UUID | grep 'FINISHED\|FAILED'; do
sleep 20
rlLogInfo "Waiting for compose to finish ..."
done;
check_compose_status "$UUID"
rlRun -t -c "mkdir -p /var/log/$TEST"
rlRun -t -c "$CLI compose logs $UUID"
rlRun -t -c "mv $UUID-logs.tar /var/log/$TEST"
else
rlFail "Compose UUID is empty!"
fi
}
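The wait_for_compose loop above is the pattern the CLI tests rely on most; here is a hedged Python equivalent of the same polling, shown only to make the flow explicit (the composer-cli command and the 20 second interval mirror the shell code, and nothing in this sketch is part of the repository):
import subprocess
import time

def wait_for_compose(uuid, cli="composer-cli", interval=20):
    """Poll `compose info` until the compose reports FINISHED or FAILED."""
    while True:
        out = subprocess.run([cli, "compose", "info", uuid],
                             capture_output=True, text=True).stdout
        if "FINISHED" in out or "FAILED" in out:
            return "FAILED" not in out
        time.sleep(interval)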

View File

@@ -1,18 +0,0 @@
name = "test-http-server"
description = "Http server with PHP and MySQL support used in tests."
version = "0.0.1"
[[modules]]
name = "httpd"
version = "2.4.*"
[[modules]]
name = "php"
version = "7.*"
[[packages]]
name = "openssh-server"
version = "*"
[customizations.kernel]
append = "custom_cmdline_arg console=ttyS0,115200n8"

View File

@@ -1,13 +0,0 @@
#!/usr/bin/env python
import sys
import toml
if len(sys.argv) != 3:
print("USAGE: ", __file__, "<blueprint-one.toml> <blueprint-two.toml>")
sys.exit(1)
blueprint_one = toml.loads(open(sys.argv[1]).read())
blueprint_two = toml.loads(open(sys.argv[2]).read())
assert blueprint_one == blueprint_two

View File

@@ -1,115 +0,0 @@
#!/bin/bash
# Note: execute this file from the project root directory
set -e
. /usr/share/beakerlib/beakerlib.sh
. $(dirname $0)/lib/lib.sh
CLI="${CLI:-./src/bin/composer-cli}"
rlJournalStart
rlPhaseStartTest "blueprints list"
if [ "$BACKEND" != "osbuild-composer" ]; then
for bp in example-http-server example-development example-atlas; do
rlAssertEquals "blueprint list finds $bp" \
"`$CLI blueprints list | grep $bp`" "$bp"
done
fi
rlRun -t -c "$CLI blueprints push $(dirname $0)/lib/test-http-server.toml"
rlPhaseEnd
rlPhaseStartTest "blueprints save"
rlRun -t -c "$CLI blueprints save test-http-server"
rlAssertExists "test-http-server.toml"
rlAssertGrep "test-http-server" "test-http-server.toml"
rlAssertGrep "httpd" "test-http-server.toml"
# non-existing blueprint
rlRun -t -c "$CLI blueprints save non-existing-bp" 1
rlAssertNotExists "non-existing-bp.toml"
rlPhaseEnd
rlPhaseStartTest "blueprints push"
BLUEPRINT_NAME="openssh-server"
cat > $BLUEPRINT_NAME.toml << __EOF__
name = "$BLUEPRINT_NAME"
description = "Simple blueprint including only openssh"
version = "0.0.1"
modules = []
groups = []
[[packages]]
name = "openssh-server"
version = "*"
__EOF__
rlRun -t -c "$CLI blueprints push $BLUEPRINT_NAME.toml"
rlAssertEquals "pushed bp is found via list" "`$CLI blueprints list | grep $BLUEPRINT_NAME`" "$BLUEPRINT_NAME"
rlPhaseEnd
rlPhaseStartTest "blueprints show"
$CLI blueprints show $BLUEPRINT_NAME > shown-$BLUEPRINT_NAME.toml
rlRun -t -c "$(dirname $0)/lib/toml-compare $BLUEPRINT_NAME.toml shown-$BLUEPRINT_NAME.toml"
rlPhaseEnd
rlPhaseStartTest "SemVer .patch version is incremented automatically"
# version is still 0.0.1
rlAssertEquals "version is 0.0.1" "`$CLI blueprints show $BLUEPRINT_NAME | grep 0.0.1`" 'version = "0.0.1"'
# add a new package to the existing blueprint
cat >> $BLUEPRINT_NAME.toml << __EOF__
[[packages]]
name = "php"
version = "*"
__EOF__
# push again
rlRun -t -c "$CLI blueprints push $BLUEPRINT_NAME.toml"
# official documentation says:
# If a new blueprint is uploaded with the same version the server will
# automatically bump the PATCH level of the version. If the version
# doesn't match it will be used as is.
rlAssertEquals "version is 0.0.2" "`$CLI blueprints show $BLUEPRINT_NAME | grep 0.0.2`" 'version = "0.0.2"'
rlPhaseEnd
rlPhaseStartTest "blueprints delete"
rlRun -t -c "$CLI blueprints delete $BLUEPRINT_NAME"
rlAssertEquals "bp not found after delete" "`$CLI blueprints list | grep $BLUEPRINT_NAME`" ""
rlPhaseEnd
rlPhaseStartTest "start a compose with deleted blueprint"
cat > to-be-deleted.toml << __EOF__
name = "to-be-deleted"
description = "Dummy blueprint for testing compose start with a deleted blueprint"
version = "0.0.1"
__EOF__
rlRun -t -c "$CLI blueprints push to-be-deleted.toml"
rlRun -t -c "$CLI blueprints delete to-be-deleted"
rlRun -t -c "$CLI compose list | grep to-be-deleted" 1
rlRun -t -c "$CLI blueprints list | grep to-be-deleted" 1
compose_id=$($CLI compose start to-be-deleted tar)
rlAssertEquals "composer-cli exited with 1 when starting a compose using a deleted blueprint" "$?" "1"
compose_id=$(echo $compose_id | cut -f 2 -d' ')
if [ -z "$compose_id" ]; then
rlPass "It wasn't possible to start a compose using a deleted blueprint."
else
rlFail "It was possible to start a compose using a deleted blueprint!"
# don't wait for the compose to finish if it started unexpectedly, and do cleanup
rlRun -t -c "$CLI compose cancel $compose_id"
rlRun -t -c "$CLI compose delete $compose_id"
fi
rlRun -t -c "rm -f to-be-deleted.toml"
unset compose_id
rlPhaseEnd
rlPhaseStartCleanup
rlRun -t -c "rm *.toml"
rlPhaseEnd
rlJournalEnd
rlJournalPrintText

View File

@@ -1,114 +0,0 @@
#!/bin/bash
# Note: execute this file from the project root directory
set -e
. /usr/share/beakerlib/beakerlib.sh
. $(dirname $0)/lib/lib.sh
CLI="${CLI:-./src/bin/composer-cli}"
rlJournalStart
rlPhaseStartTest "compose types"
TYPE_LIVE_ISO="live-iso"
TYPE_ALIBABA="alibaba"
TYPE_GOOGLE="google"
TYPE_HYPER_V="hyper-v"
TYPE_LIVEIMG="liveimg-tar"
TYPE_EXT4="ext4-filesystem"
TYPE_PARTITIONED_DISK="partitioned-disk"
TYPE_TAR="tar"
TYPE_IOT=""
# backend specific compose type overrides
if [ "$BACKEND" == "osbuild-composer" ]; then
TYPE_LIVE_ISO=""
TYPE_ALIBABA=""
TYPE_GOOGLE=""
TYPE_HYPER_V=""
TYPE_LIVEIMG=""
TYPE_EXT4=""
TYPE_PARTITIONED_DISK=""
TYPE_TAR=""
TYPE_IOT="fedora-iot-commit"
fi
# arch specific compose type selections
if [ "$(uname -m)" == "x86_64" ]; then
SUPPORTED_TYPES="$TYPE_ALIBABA ami $TYPE_IOT $TYPE_EXT4 $TYPE_GOOGLE $TYPE_HYPER_V $TYPE_LIVE_ISO $TYPE_LIVEIMG openstack $TYPE_PARTITIONED_DISK qcow2 $TYPE_TAR vhd vmdk"
elif [ "$(uname -m)" == "aarch64" ]; then
# ami is supported on aarch64
SUPPORTED_TYPES="ami $TYPE_EXT4 $TYPE_LIVE_ISO $TYPE_LIVEIMG openstack $TYPE_PARTITIONED_DISK qcow2 $TYPE_TAR"
else
SUPPORTED_TYPES="$TYPE_EXT4 $TYPE_LIVE_ISO $TYPE_LIVEIMG openstack $TYPE_PARTITIONED_DISK qcow2 $TYPE_TAR"
fi
# truncate white space in case some types are not available
SUPPORTED_TYPES=$(echo "$SUPPORTED_TYPES" | tr -s ' ' | sed 's/^[[:space:]]*//')
rlAssertEquals "lists all supported types" "`$CLI compose types | xargs`" "$SUPPORTED_TYPES"
rlPhaseEnd
rlPhaseStartTest "compose start"
rlRun -t -c "$CLI blueprints push $(dirname $0)/lib/test-http-server.toml"
UUID=`$CLI compose start test-http-server qcow2`
rlAssertEquals "exit code should be zero" $? 0
UUID=`echo $UUID | cut -f 2 -d' '`
if [ -n "$UUID" ]; then
until $CLI compose info $UUID | grep 'RUNNING'; do
sleep 20
rlLogInfo "Waiting for compose to start running..."
if $CLI compose info $UUID | grep 'FAILED'; then
rlFail "Compose FAILED!"
break
fi
done;
else
rlFail "Compose UUID is empty!"
fi
rlPhaseEnd
rlPhaseStartTest "cancel compose"
rlRun -t -c "$CLI compose cancel $UUID"
if [ "$BACKEND" == "lorax-composer" ]; then
rlRun -t -c "$CLI compose info $UUID" 1 "compose is canceled"
fi
rlPhaseEnd
if [ -z "$SKIP_IMAGE_BUILD" ]; then
rlPhaseStartTest "compose start again"
UUID=`$CLI compose start test-http-server qcow2`
rlAssertEquals "exit code should be zero" $? 0
UUID=`echo $UUID | cut -f 2 -d' '`
rlPhaseEnd
rlPhaseStartTest "compose image"
wait_for_compose $UUID
if [ -n "$UUID" ]; then
check_compose_status "$UUID"
rlRun -t -c "$CLI compose image $UUID"
rlAssertExists "$UUID-disk.qcow2"
fi
if [ "$BACKEND" != "osbuild-composer" ]; then
# because this path is listed in the documentation
rlAssertExists "/var/lib/lorax/composer/results/$UUID/"
rlAssertExists "/var/lib/lorax/composer/results/$UUID/disk.qcow2"
rlAssertNotDiffer "/var/lib/lorax/composer/results/$UUID/disk.qcow2" "$UUID-disk.qcow2"
fi
rlPhaseEnd
else
rlLogInfo "Skipping image build phases"
fi
rlPhaseStartCleanup
if [ "$($CLI compose list | grep -c $UUID)" == "1" ]; then
rlRun -t -c "$CLI compose delete $UUID"
fi
rlPhaseEnd
rlJournalEnd
rlJournalPrintText

View File

@@ -1,321 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import tempfile
import unittest
from ..lib import captured_output
from composer.cli.blueprints import pretty_diff_entry, blueprints_list, blueprints_show, blueprints_changes
from composer.cli.blueprints import blueprints_diff, blueprints_save, blueprints_delete, blueprints_depsolve
from composer.cli.blueprints import blueprints_push, blueprints_freeze, blueprints_undo, blueprints_tag
from composer.cli.blueprints import pretty_dict, dict_names
diff_entries = [{'new': {'Description': 'Shiny new description'}, 'old': {'Description': 'Old reliable description'}},
{'new': {'Version': '0.3.1'}, 'old': {'Version': '0.1.1'}},
{'new': {'Module': {'name': 'openssh', 'version': '2.8.1'}}, 'old': None},
{'new': None, 'old': {'Module': {'name': 'bash', 'version': '5.*'}}},
{'new': {'Module': {'name': 'httpd', 'version': '3.8.*'}},
'old': {'Module': {'name': 'httpd', 'version': '3.7.*'}}},
{'new': {'Package': {'name': 'git', 'version': '2.13.*'}}, 'old': None},
# New items
{"new": {"Group": {"name": "core"}}, "old": None},
{"new": {"Customizations.firewall": {"ports": ["8888:tcp", "22:tcp", "dns:udp", "9090:tcp"], "services": ["smtp"]}}, "old": None},
{"new": {"Customizations.hostname": "foobar"}, "old": None},
{"new": {"Customizations.locale": {"keyboard": "US"}}, "old": None},
{"new": {"Customizations.sshkey": [{"key": "ssh-rsa AAAAB3NzaC1... norm@localhost.localdomain", "user": "norm" }]}, "old": None},
{"new": {"Customizations.timezone": {"ntpservers": ["ntp.nowhere.com" ], "timezone": "PST8PDT"}}, "old": None},
{"new": {"Customizations.user": [{"key": "ssh-rsa AAAAB3NzaC1... root@localhost.localdomain", "name": "root", "password": "fobarfobar"}]}, "old": None},
{"new": {"Repos.git": {"destination": "/opt/server-1/", "ref": "v1.0", "repo": "PATH OF GIT REPO TO CLONE", "rpmname": "server-config", "rpmrelease": "2", "rpmversion": "1.0", "summary": "Setup files for server deployment"}}, "old": None},
# Removed items (just reversed old/old from above block)
{"old": {"Group": {"name": "core"}}, "new": None},
{"old": {"Customizations.firewall": {"ports": ["8888:tcp", "22:tcp", "dns:udp", "9090:tcp"], "services": ["smtp"]}}, "new": None},
{"old": {"Customizations.hostname": "foobar"}, "new": None},
{"old": {"Customizations.locale": {"keyboard": "US"}}, "new": None},
{"old": {"Customizations.sshkey": [{"key": "ssh-rsa AAAAB3NzaC1... norm@localhost.localdomain", "user": "norm" }]}, "new": None},
{"old": {"Customizations.timezone": {"ntpservers": ["ntp.nowhere.com" ], "timezone": "PST8PDT"}}, "new": None},
{"old": {"Customizations.user": [{"key": "ssh-rsa AAAAB3NzaC1... root@localhost.localdomain", "name": "root", "password": "fobarfobar"}]}, "new": None},
{"old": {"Repos.git": {"destination": "/opt/server-1/", "ref": "v1.0", "repo": "PATH OF GIT REPO TO CLONE", "rpmname": "server-config", "rpmrelease": "2", "rpmversion": "1.0", "summary": "Setup files for server deployment"}}, "new": None},
# Changed items
{"old": {"Customizations.firewall": {"ports": ["8888:tcp", "22:tcp", "dns:udp", "9090:tcp"], "services": ["smtp"]}}, "new": {"Customizations.firewall": {"ports": ["8888:tcp", "22:tcp", "25:tcp"]}}},
{"old": {"Customizations.hostname": "foobar"}, "new": {"Customizations.hostname": "grues"}},
{"old": {"Customizations.locale": {"keyboard": "US"}}, "new": {"Customizations.locale": {"keyboard": "US", "languages": ["en_US.UTF-8"]}}},
{"old": {"Customizations.sshkey": [{"key": "ssh-rsa AAAAB3NzaC1... norm@localhost.localdomain", "user": "norm" }]}, "new": {"Customizations.sshkey": [{"key": "ssh-rsa ABCDEF01234... norm@localhost.localdomain", "user": "norm" }]}},
{"old": {"Customizations.timezone": {"ntpservers": ["ntp.nowhere.com" ], "timezone": "PST8PDT"}}, "new": {"Customizations.timezone": {"timezone": "Antarctica/Palmer"}}},
{"old": {"Customizations.user": [{"key": "ssh-rsa AAAAB3NzaC1... root@localhost.localdomain", "name": "root", "password": "fobarfobar"}]}, "new": {"Customizations.user": [{"key": "ssh-rsa AAAAB3NzaC1... root@localhost.localdomain", "name": "root", "password": "qweqweqwe"}]}},
{"old": {"Repos.git": {"destination": "/opt/server-1/", "ref": "v1.0", "repo": "PATH OF GIT REPO TO CLONE", "rpmname": "server-config", "rpmrelease": "2", "rpmversion": "1.0", "summary": "Setup files for server deployment"}}, "new": {"Repos.git": {"destination": "/opt/server-1/", "ref": "v1.0", "repo": "PATH OF GIT REPO TO CLONE", "rpmname": "server-config", "rpmrelease": "1", "rpmversion": "1.1", "summary": "Setup files for server deployment"}}}
]
diff_result = [
'Changed Description "Old reliable description" -> "Shiny new description"',
'Changed Version 0.1.1 -> 0.3.1',
'Added Module openssh 2.8.1',
'Removed Module bash 5.*',
'Changed Module httpd 3.7.* -> 3.8.*',
'Added Package git 2.13.*',
'Added Group core',
'Added Customizations.firewall ports="8888:tcp, 22:tcp, dns:udp, 9090:tcp" services="smtp"',
'Added Customizations.hostname foobar',
'Added Customizations.locale keyboard="US"',
'Added Customizations.sshkey norm',
'Added Customizations.timezone ntpservers="ntp.nowhere.com" timezone="PST8PDT"',
'Added Customizations.user root',
'Added Repos.git destination="/opt/server-1/" ref="v1.0" repo="PATH OF GIT REPO TO CLONE" rpmname="server-config" rpmrelease="2" rpmversion="1.0" summary="Setup files for server deployment"',
'Removed Group core',
'Removed Customizations.firewall ports="8888:tcp, 22:tcp, dns:udp, 9090:tcp" services="smtp"',
'Removed Customizations.hostname foobar',
'Removed Customizations.locale keyboard="US"',
'Removed Customizations.sshkey norm',
'Removed Customizations.timezone ntpservers="ntp.nowhere.com" timezone="PST8PDT"',
'Removed Customizations.user root',
'Removed Repos.git destination="/opt/server-1/" ref="v1.0" repo="PATH OF GIT REPO TO CLONE" rpmname="server-config" rpmrelease="2" rpmversion="1.0" summary="Setup files for server deployment"',
'Changed Customizations.firewall ports="8888:tcp, 22:tcp, dns:udp, 9090:tcp" services="smtp" -> ports="8888:tcp, 22:tcp, 25:tcp"',
'Changed Customizations.hostname foobar -> grues',
'Changed Customizations.locale keyboard="US" -> keyboard="US" languages="en_US.UTF-8"',
'Changed Customizations.sshkey norm -> norm',
'Changed Customizations.timezone ntpservers="ntp.nowhere.com" timezone="PST8PDT" -> timezone="Antarctica/Palmer"',
'Changed Customizations.user root -> root',
'Changed Repos.git destination="/opt/server-1/" ref="v1.0" repo="PATH OF GIT REPO TO CLONE" rpmname="server-config" rpmrelease="2" rpmversion="1.0" summary="Setup files for server deployment" -> destination="/opt/server-1/" ref="v1.0" repo="PATH OF GIT REPO TO CLONE" rpmname="server-config" rpmrelease="1" rpmversion="1.1" summary="Setup files for server deployment"',
]
dict_entries = [{"ports": ["8888:tcp", "22:tcp", "dns:udp", "9090:tcp"]},
{"ports": ["8888:tcp", "22:tcp", "dns:udp", "9090:tcp"], "services": ["smtp"]},
{ "destination": "/opt/server-1/", "ref": "v1.0", "repo": "PATH OF GIT REPO TO CLONE", "rpmname": "server-config", "rpmrelease": "1", "rpmversion": "1.0", "summary": "Setup files for server deployment" },
{"foo": ["one", "two"], "bar": {"baz": "three"}}]
dict_results = ['ports="8888:tcp, 22:tcp, dns:udp, 9090:tcp"',
'ports="8888:tcp, 22:tcp, dns:udp, 9090:tcp" services="smtp"',
'destination="/opt/server-1/" ref="v1.0" repo="PATH OF GIT REPO TO CLONE" rpmname="server-config" rpmrelease="1" rpmversion="1.0" summary="Setup files for server deployment"',
'foo="one, two"']
dict_name_entry1 = [{"name": "bart", "home": "Springfield"},
{"name": "lisa", "instrument": "Saxaphone"},
{"name": "homer", "kids": ["bart", "maggie", "lisa"]}]
dict_name_results1 = "bart, lisa, homer"
dict_name_entry2 = [{"user": "root", "password": "qweqweqwe"},
{"user": "norm", "password": "b33r"},
{"user": "cliff", "password": "POSTMASTER"}]
dict_name_results2 = "root, norm, cliff"
dict_name_entry3 = [{"home": "/root", "key": "skeleton"},
{"home": "/home/norm", "key": "SSH KEY"},
{"home": "/home/cliff", "key": "lost"}]
dict_name_results3 = "/root, /home/norm, /home/cliff"
HTTP_BLUEPRINT = b"""name = "example-http-server"
description = "An example http server with PHP and MySQL support."
version = "0.0.1"
[[packages]]
name = "httpd"
version = "*"
[[packages]]
name = "tmux"
version = "*"
[[packages]]
name = "openssh-server"
version = "*"
[[packages]]
name = "rsync"
version = "*"
[[modules]]
name = "php"
version = "*"
"""
DEV_BLUEPRINT = b"""name = "example-development"
description = "A general purpose development image"
[[packages]]
name = "cmake"
version = "*"
[[packages]]
name = "curl"
version = "*"
[[packages]]
name = "gcc"
version = "*"
[[packages]]
name = "gdb"
version = "*"
"""
class BlueprintsTest(unittest.TestCase):
def test_pretty_diff_entry(self):
"""Return a nice representation of a diff entry"""
self.assertEqual([pretty_diff_entry(entry) for entry in diff_entries], diff_result)
def test_pretty_dict(self):
"""Return a human readable single line"""
self.assertEqual([pretty_dict(entry) for entry in dict_entries], dict_results)
def test_dict_names_users(self):
"""Return a list of the name field of the list of dicts"""
self.assertEqual(dict_names(dict_name_entry1), dict_name_results1)
def test_dict_names_sshkey(self):
"""Return a list of the user field of the list of dicts"""
self.assertEqual(dict_names(dict_name_entry2), dict_name_results2)
def test_dict_names_other(self):
"""Return a list of the unknown field of the list of dicts"""
self.assertEqual(dict_names(dict_name_entry3), dict_name_results3)
@unittest.skipUnless(os.path.exists("/run/weldr/api.socket"), "Tests require a running API server")
class ServerBlueprintsTest(unittest.TestCase):
# MUST come first, tests push and installs required blueprints
def test_0000(self):
"""initialize server blueprints"""
for blueprint in [HTTP_BLUEPRINT, DEV_BLUEPRINT]:
with tempfile.NamedTemporaryFile(prefix="composer.test.") as tf:
tf.write(blueprint)
tf.file.close()
rc = blueprints_push("/run/weldr/api.socket", 0, [tf.name], show_json=False)
self.assertTrue(rc == 0)
def test_list(self):
"""blueprints list"""
with captured_output() as (out, _):
rc = blueprints_list("/run/weldr/api.socket", 0, [], show_json=False)
output = out.getvalue().strip()
self.assertTrue(rc == 0)
self.assertTrue("example-http-server" in output)
def test_show(self):
"""blueprints show"""
with captured_output() as (out, _):
blueprints_show("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
output = out.getvalue().strip()
self.assertTrue("example-http-server" in output)
self.assertTrue("[[packages]]" in output)
self.assertTrue("[[modules]]" in output)
def test_changes(self):
"""blueprints changes"""
with captured_output() as (out, _):
rc = blueprints_changes("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
output = out.getvalue().strip()
self.assertTrue(rc == 0)
self.assertTrue("example-http-server" in output)
self.assertTrue("Recipe example-http-server, version 0.0.1 saved." in output)
def test_save_0(self):
"""blueprints save"""
blueprints_save("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
self.assertTrue(os.path.exists("example-http-server.toml"))
def test_save_1(self):
"""blueprints push"""
rc = blueprints_push("/run/weldr/api.socket", 0, ["example-http-server.toml"], show_json=False)
self.assertTrue(rc == 0)
def test_delete(self):
"""blueprints delete"""
rc = blueprints_delete("/run/weldr/api.socket", 0, ["example-development"], show_json=False)
self.assertTrue(rc == 0)
def test_depsolve(self):
"""blueprints depsolve"""
with captured_output() as (out, _):
rc = blueprints_depsolve("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
output = out.getvalue().strip()
self.assertTrue(rc == 0)
self.assertTrue("blueprint: example-http-server v" in output)
self.assertTrue("httpd" in output)
def test_freeze_show(self):
"""blueprints freeze show"""
with captured_output() as (out, _):
rc = blueprints_freeze("/run/weldr/api.socket", 0, ["show", "example-http-server"], show_json=False)
output = out.getvalue().strip()
self.assertTrue(rc == 0)
self.assertTrue("version" in output)
self.assertTrue("example-http-server" in output)
self.assertTrue("x86_64" in output)
self.assertTrue("[[packages]]" in output)
self.assertTrue("[[modules]]" in output)
def test_freeze_save(self):
"""blueprints freeze save"""
rc = blueprints_freeze("/run/weldr/api.socket", 0, ["save", "example-http-server"], show_json=False)
self.assertTrue(rc == 0)
self.assertTrue(os.path.exists("example-http-server.frozen.toml"))
def test_freeze(self):
"""blueprints freeze"""
with captured_output() as (out, _):
rc = blueprints_freeze("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
output = out.getvalue().strip()
self.assertTrue(rc == 0)
self.assertTrue("blueprint: example-http-server v" in output)
self.assertTrue("httpd" in output)
self.assertTrue("x86_64" in output)
def test_tag(self):
"""blueprints tag"""
rc = blueprints_tag("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
self.assertTrue(rc == 0)
def test_undo(self):
"""blueprints undo"""
# Get the oldest commit, it should be 2nd to last line
with captured_output() as (out, _):
rc = blueprints_changes("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
output = out.getvalue().strip().splitlines()
first_commit = output[-2].split()[1]
with captured_output() as (out, _):
rc = blueprints_undo("/run/weldr/api.socket", 0, ["example-http-server", first_commit, "HEAD"], show_json=False)
output = out.getvalue().strip()
self.assertTrue(rc == 0)
def test_workspace(self):
"""blueprints workspace"""
rc = blueprints_push("/run/weldr/api.socket", 0, ["example-http-server.toml"], show_json=False)
self.assertTrue(rc == 0)
# XXX MUST COME LAST
# XXX which is what _z_ ensures
@unittest.expectedFailure
def test_z_diff(self):
"""blueprints diff"""
# Get the oldest commit, it should be 2nd to last line
with captured_output() as (out, _):
rc = blueprints_changes("/run/weldr/api.socket", 0, ["example-http-server"], show_json=False)
output = out.getvalue().strip().splitlines()
first_commit = output[-2].split()[1]
with captured_output() as (out, _):
rc = blueprints_diff("/run/weldr/api.socket", 0, ["example-http-server", first_commit, "NEWEST"], show_json=False)
output = out.getvalue().strip()
self.assertTrue(rc == 0)
self.assertTrue("Changed Version" in output)

View File

@@ -1,504 +0,0 @@
#
# Copyright (C) 2020 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from http import HTTPStatus
from http.server import BaseHTTPRequestHandler
import json
import shutil
from socketserver import UnixStreamServer
import threading
import tempfile
import unittest
from composer import http_client as client
import composer.cli as cli
from composer.cli.cmdline import composer_cli_parser
from composer.cli.compose import get_size
# Use a GLOBAL to record the request data for use in the test
# because there is no way to access the request handler class from the test methods
LAST_REQUEST = {}
# Test data for upload profile
PROFILE_TOML = """
provider = "aws"
[settings]
aws_access_key = "AWS Access Key"
aws_bucket = "AWS Bucket"
aws_region = "AWS Region"
aws_secret_key = "AWS Secret Key"
"""
class MyUnixServer(UnixStreamServer):
def get_request(self):
"""There is no client address for Unix Domain Sockets, so return the server address"""
req, _ = self.socket.accept()
return (req, self.server_address)
class APIHTTPHandler(BaseHTTPRequestHandler):
STATUS = {}
def send_json_response(self, status, d):
"""Send a 200 with a JSON body"""
body = json.dumps(d).encode("UTF-8")
self.send_response(status)
self.send_header("Content-Type", "application/json")
self.send_header('Content-Length', str(len(body)))
self.end_headers()
self.wfile.write(body)
def send_api_status_dict(self):
self.send_json_response(HTTPStatus.OK, self.STATUS)
def send_api_status(self, status=True, errors=None):
if status:
self.send_json_response(HTTPStatus.OK, {"status": True})
else:
self.send_json_response(HTTPStatus.BAD_REQUEST,
{"status": False, "errors": [{"id": "0", "msg": "Test Framework"}]})
def save_request(self):
global LAST_REQUEST
LAST_REQUEST = {
"command": self.command,
"path": self.path,
"headers": self.headers,
"body": "",
}
try:
length = int(self.headers.get('content-length'))
LAST_REQUEST["body"] = self.rfile.read(length)
except (ValueError, TypeError):
pass
print("%s" % LAST_REQUEST)
def do_GET(self):
self.save_request()
if self.path == "/api/status":
self.send_api_status_dict()
def do_POST(self):
# Need to check for /api/status and send the correct response
self.save_request()
self.send_api_status(True)
class LoraxAPIv0HTTPHandler(APIHTTPHandler):
STATUS = {
"api": "0",
"backend": "lorax-composer",
"build": "devel",
"db_supported": True,
"db_version": "0",
"msgs": [],
"schema_version": "0"
}
class LoraxAPIv1HTTPHandler(APIHTTPHandler):
STATUS = {
"api": "1",
"backend": "lorax-composer",
"build": "devel",
"db_supported": True,
"db_version": "0",
"msgs": [],
"schema_version": "0"
}
class OsBuildAPIv0HTTPHandler(APIHTTPHandler):
STATUS = {
"api": "0",
"backend": "osbuild-composer",
"build": "devel",
"db_supported": True,
"db_version": "0",
"msgs": [],
"schema_version": "0"
}
class OsBuildAPIv1HTTPHandler(APIHTTPHandler):
STATUS = {
"api": "1",
"backend": "osbuild-composer",
"build": "devel",
"db_supported": True,
"db_version": "0",
"msgs": [],
"schema_version": "0"
}
class ComposeTestCase(unittest.TestCase):
def run_test(self, args):
global LAST_REQUEST
LAST_REQUEST = {}
p = composer_cli_parser()
opts = p.parse_args(args)
status = cli.main(opts)
LAST_REQUEST["cli_status"] = status
return LAST_REQUEST
class ComposeLoraxV0TestCase(ComposeTestCase):
@classmethod
def setUpClass(self):
self.tmpdir = tempfile.mkdtemp(prefix="composer-cli.test.")
self.socket = self.tmpdir + "/api.socket"
self.server = MyUnixServer(self.socket, LoraxAPIv0HTTPHandler)
self.thread = threading.Thread(target=self.server.serve_forever)
self.thread.daemon = True
self.thread.start()
@classmethod
def tearDownClass(self):
self.server.shutdown()
self.thread.join(10)
shutil.rmtree(self.tmpdir)
def test_status(self):
"""Make sure the mock status response is working"""
global LAST_REQUEST
LAST_REQUEST = {}
result = client.get_url_json(self.socket, "/api/status")
self.assertTrue("path" in LAST_REQUEST)
self.assertEqual(LAST_REQUEST["path"], "/api/status")
self.assertEqual(result, LoraxAPIv0HTTPHandler.STATUS)
def test_compose_start_plain(self):
result = self.run_test(["--socket", self.socket, "--api", "0", "compose", "start", "http-server", "qcow2"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master"})
class ComposeLoraxV1TestCase(ComposeTestCase):
@classmethod
def setUpClass(self):
self.tmpdir = tempfile.mkdtemp(prefix="composer-cli.test.")
self.socket = self.tmpdir + "/api.socket"
self.server = MyUnixServer(self.socket, LoraxAPIv1HTTPHandler)
self.thread = threading.Thread(target=self.server.serve_forever)
self.thread.daemon = True
self.thread.start()
@classmethod
def tearDownClass(self):
self.server.shutdown()
self.thread.join(10)
shutil.rmtree(self.tmpdir)
def test_status(self):
"""Make sure the mock status response is working"""
global LAST_REQUEST
LAST_REQUEST = {}
result = client.get_url_json(self.socket, "/api/status")
self.assertTrue("path" in LAST_REQUEST)
self.assertEqual(LAST_REQUEST["path"], "/api/status")
self.assertEqual(result, LoraxAPIv1HTTPHandler.STATUS)
def test_compose_start_plain(self):
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start", "http-server", "qcow2"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master"})
def test_compose_start_upload(self):
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start", "http-server", "qcow2", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master",
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
def test_compose_start_provider(self):
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start", "http-server", "qcow2", "httpimage", "aws", "production"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master",
"upload": {"image_name": "httpimage", "profile": "production", "provider": "aws"}})
class ComposeOsBuildV0TestCase(ComposeTestCase):
@classmethod
def setUpClass(self):
self.tmpdir = tempfile.mkdtemp(prefix="composer-cli.test.")
self.socket = self.tmpdir + "/api.socket"
self.server = MyUnixServer(self.socket, OsBuildAPIv0HTTPHandler)
self.thread = threading.Thread(target=self.server.serve_forever)
self.thread.daemon = True
self.thread.start()
@classmethod
def tearDownClass(self):
self.server.shutdown()
self.thread.join(10)
shutil.rmtree(self.tmpdir)
def test_status(self):
"""Make sure the mock status response is working"""
global LAST_REQUEST
LAST_REQUEST = {}
result = client.get_url_json(self.socket, "/api/status")
self.assertTrue("path" in LAST_REQUEST)
self.assertEqual(LAST_REQUEST["path"], "/api/status")
self.assertEqual(result, OsBuildAPIv0HTTPHandler.STATUS)
def test_compose_start_plain(self):
result = self.run_test(["--socket", self.socket, "--api", "0", "compose", "start", "http-server", "qcow2"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master"})
class ComposeOsBuildV1TestCase(ComposeTestCase):
@classmethod
def setUpClass(self):
self.tmpdir = tempfile.mkdtemp(prefix="composer-cli.test.")
self.socket = self.tmpdir + "/api.socket"
self.server = MyUnixServer(self.socket, OsBuildAPIv1HTTPHandler)
self.thread = threading.Thread(target=self.server.serve_forever)
self.thread.daemon = True
self.thread.start()
@classmethod
def tearDownClass(self):
self.server.shutdown()
self.thread.join(10)
shutil.rmtree(self.tmpdir)
def test_status(self):
"""Make sure the mock status response is working"""
global LAST_REQUEST
LAST_REQUEST = {}
result = client.get_url_json(self.socket, "/api/status")
self.assertTrue("path" in LAST_REQUEST)
self.assertEqual(LAST_REQUEST["path"], "/api/status")
self.assertEqual(result, OsBuildAPIv1HTTPHandler.STATUS)
def test_compose_start_plain(self):
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start", "http-server", "qcow2"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master"})
def test_compose_start_upload(self):
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start", "http-server", "qcow2", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master",
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
def test_compose_start_plain_size(self):
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start", "--size", "1776", "http-server", "qcow2"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master", "size": 1862270976})
def test_compose_start_size_upload(self):
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start", "--size", "1791", "http-server", "qcow2", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "qcow2", "branch": "master", "size": 1877999616,
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
def test_compose_start_ostree_noargs(self):
"""Test start-ostree with no parent and no ref"""
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "http-server", "fedora-iot-commit"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "", "parent": ""}})
def test_compose_start_ostree_parent(self):
"""Test start-ostree with --parent"""
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--parent", "parenturl", "http-server", "fedora-iot-commit"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "", "parent": "parenturl"}})
def test_compose_start_ostree_ref(self):
"""Test start-ostree with --ref"""
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--ref", "refid", "http-server", "fedora-iot-commit"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "refid", "parent": ""}})
def test_compose_start_ostree_refparent(self):
"""Test start-ostree with --ref and --parent"""
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--ref", "refid", "--parent", "parenturl", "http-server", "fedora-iot-commit"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "refid", "parent": "parenturl"}})
def test_compose_start_ostree_size(self):
"""Test start-ostree with --size, --ref and --parent"""
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--size", "2048", "--ref", "refid", "--parent", "parenturl", "http-server", "fedora-iot-commit"])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"size": 2147483648,
"ostree": {"ref": "refid", "parent": "parenturl"}})
def test_compose_start_ostree_missing(self):
"""Test start-ostree with missing argument"""
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "http-server"])
self.assertTrue(result is not None)
self.assertTrue("cli_status" in result)
self.assertEqual(result["cli_status"], 1)
def test_compose_start_ostree_upload_parent(self):
"""Test start-ostree upload with --parent"""
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--parent", "parenturl", "http-server", "fedora-iot-commit", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "", "parent": "parenturl"},
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
def test_compose_start_ostree_upload_ref(self):
"""Test start-ostree upload with --ref"""
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--ref", "refid", "http-server", "fedora-iot-commit", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "refid", "parent": ""},
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
def test_compose_start_ostree_upload_refparent(self):
"""Test start-ostree upload with --ref and --parent"""
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--parent", "parenturl", "--ref", "refid", "http-server", "fedora-iot-commit", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "refid", "parent": "parenturl"},
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
def test_compose_start_ostree_upload_size(self):
"""Test start-ostree upload with --size, --ref and --parent"""
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "--size", "2048", "--parent", "parenturl", "--ref", "refid", "http-server", "fedora-iot-commit", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"size": 2147483648,
"ostree": {"ref": "refid", "parent": "parenturl"},
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
def test_compose_start_ostree_upload(self):
with tempfile.NamedTemporaryFile(prefix="composer-cli.test.") as f:
f.write(PROFILE_TOML.encode("UTF-8"))
f.seek(0)
result = self.run_test(["--socket", self.socket, "--api", "1", "compose", "start-ostree", "http-server", "fedora-iot-commit", "httpimage", f.name])
self.assertTrue(result is not None)
self.assertTrue("body" in result)
self.assertGreater(len(result["body"]), 0)
jd = json.loads(result["body"])
self.assertEqual(jd, {"blueprint_name": "http-server", "compose_type": "fedora-iot-commit", "branch": "master",
"ostree": {"ref": "", "parent": ""},
"upload": {"image_name": "httpimage", "provider": "aws",
"settings": {"aws_access_key": "AWS Access Key", "aws_bucket": "AWS Bucket", "aws_region": "AWS Region", "aws_secret_key": "AWS Secret Key"}}})
class SizeTest(unittest.TestCase):
def test_empty(self):
self.assertEqual(get_size([]), ([], 0))
def test_no_size(self):
self.assertEqual(get_size(["blueprint", "type", "imagename", "profile"]),
(["blueprint", "type", "imagename", "profile"], 0))
def test_size_later(self):
self.assertEqual(get_size(["start", "--size", "100", "type"]), (["start", "type"], 104857600))
def test_size_no_value(self):
with self.assertRaises(RuntimeError):
get_size(["--size"])
def test_size_nonint(self):
with self.assertRaises(ValueError):
get_size(["--size", "abc"])
def test_get_size(self):
self.assertEqual(get_size(["--size", "1912", "blueprint", "type"]),
(["blueprint", "type"], 2004877312))

View File

@ -1,36 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import unittest
from composer.http_client import api_url, get_filename
headers = {'content-disposition': 'attachment; filename=e7b9b9b0-5867-493d-89c3-115cfe9227d7-metadata.tar;',
'access-control-max-age': '21600',
'transfer-encoding': 'chunked',
'date': 'Tue, 13 Mar 2018 17:37:18 GMT',
'access-control-allow-origin': '*',
'access-control-allow-methods': 'HEAD, OPTIONS, GET',
'content-type': 'application/x-tar'}
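# The headers dict above mimics the response returned when downloading
# compose metadata; get_filename() should extract the tar filename from its
# content-disposition field.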
class HttpClientTest(unittest.TestCase):
def test_api_url(self):
"""Return the API url including the API version"""
self.assertEqual(api_url("0", "/path/to/enlightenment"), "/api/v0/path/to/enlightenment")
def test_get_filename(self):
"""Return the filename from a content-disposition header"""
self.assertEqual(get_filename(headers), "e7b9b9b0-5867-493d-89c3-115cfe9227d7-metadata.tar")

View File

@ -1,131 +0,0 @@
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import unittest
from composer.cli.utilities import argify, toml_filename, frozen_toml_filename, packageNEVRA
from composer.cli.utilities import handle_api_result, get_arg
INVALID_CHARS = "InvalidChars"
class CliUtilitiesTest(unittest.TestCase):
def test_argify(self):
"""Convert an optionally comma-separated cmdline into a list of args"""
self.assertEqual(argify(["one,two", "three", ",four", ",five,"]), ["one", "two", "three", "four", "five"])
def test_toml_filename(self):
"""Return the recipe's toml filename"""
self.assertEqual(toml_filename("http server"), "http-server.toml")
def test_frozen_toml_filename(self):
"""Return the recipe's frozen toml filename"""
self.assertEqual(frozen_toml_filename("http server"), "http-server.frozen.toml")
def test_packageNEVRA(self):
"""Return a string with the NVRA or NEVRA"""
epoch_0 = {"arch": "noarch",
"epoch": 0,
"name": "basesystem",
"release": "7.el7",
"version": "10.0"}
epoch_3 = {"arch": "noarch",
"epoch": 3,
"name": "basesystem",
"release": "7.el7",
"version": "10.0"}
self.assertEqual(packageNEVRA(epoch_0), "basesystem-10.0-7.el7.noarch")
self.assertEqual(packageNEVRA(epoch_3), "basesystem-3:10.0-7.el7.noarch")
def test_api_result_1(self):
"""Test a result with no status and no error fields"""
result = {"foo": "bar"}
self.assertEqual(handle_api_result(result, show_json=False), (0, False))
self.assertTrue(handle_api_result(result, show_json=False)[0] == 0)
def test_api_result_2(self):
"""Test a result with errors=[{"id": INVALID_CHARS, "msg": "some error"}], and no status field"""
result = {"foo": "bar", "errors": [{"id": INVALID_CHARS, "msg": "some error"}]}
self.assertEqual(handle_api_result(result, show_json=False), (1, False))
self.assertTrue(handle_api_result(result, show_json=False)[0] == 1)
def test_api_result_3(self):
"""Test a result with status=True, and errors=[]"""
result = {"status": True, "errors": []}
self.assertEqual(handle_api_result(result, show_json=False), (0, False))
def test_api_result_4(self):
"""Test a result with status=False, and errors=[]"""
result = {"status": False, "errors": []}
self.assertEqual(handle_api_result(result, show_json=False), (1, True))
def test_api_result_5(self):
"""Test a result with status=False, and errors=[{"id": INVALID_CHARS, "msg": "some error"}]"""
result = {"status": False, "errors": [{"id": INVALID_CHARS, "msg": "some error"}]}
self.assertEqual(handle_api_result(result, show_json=False), (1, True))
def test_api_result_6(self):
"""Test a result with show_json=True, and no status or errors fields"""
result = {"foo": "bar"}
self.assertEqual(handle_api_result(result, show_json=True), (0, True))
def test_api_result_7(self):
"""Test a result with show_json=True, status=False, and errors=[{"id": INVALID_CHARS, "msg": "some error"}]"""
result = {"status": False, "errors": [{"id": INVALID_CHARS, "msg": "some error"}]}
self.assertEqual(handle_api_result(result, show_json=True), (1, True))
def test_api_result_8(self):
"""Test a result with show_json=True, errors=[{"id": INVALID_CHARS, "msg": "some error"}], and no status field"""
result = {"foo": "bar", "errors": [{"id": INVALID_CHARS, "msg": "some error"}]}
self.assertEqual(handle_api_result(result, show_json=True), (1, True))
def test_api_result_9(self):
"""Test a result with show_json=True, errors=[], and no status field"""
result = {"foo": "bar", "errors": []}
self.assertEqual(handle_api_result(result, show_json=True), (0, True))
def test_get_arg_empty(self):
"""Test get_arg with no arguments"""
self.assertEqual(get_arg([], "--size"), ([], None))
def test_get_arg_no_arg(self):
"""Test get_arg with no argument in the list"""
self.assertEqual(get_arg(["first", "second"], "--size"), (["first", "second"], None))
def test_get_arg_notype(self):
"""Test get_arg with no argtype set"""
self.assertEqual(get_arg(["first", "--size", "100", "second"], "--size"), (["first", "second"], "100"))
def test_get_arg_string(self):
"""Test get_arg with a string argument"""
self.assertEqual(get_arg(["first", "--size", "100", "second"], "--size", str), (["first", "second"], "100"))
def test_get_arg_int(self):
"""Test get_arg with an int argument"""
self.assertEqual(get_arg(["first", "--size", "100", "second"], "--size", int), (["first", "second"], 100))
def test_get_arg_short(self):
"""Test get_arg error handling with a short list"""
with self.assertRaises(RuntimeError):
get_arg(["first", "--size", ], "--size", int)
def test_get_arg_start(self):
"""Test get_arg with the argument at the start of the list"""
self.assertEqual(get_arg(["--size", "100", "first", "second"], "--size", int), (["first", "second"], 100))
def test_get_arg_wrong_type(self):
"""Test get_arg with the wrong type"""
with self.assertRaises(ValueError):
get_arg(["first", "--size", "abc", "second"], "--size", int)

View File

@ -14,14 +14,10 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
from contextlib import contextmanager
import magic
from io import StringIO
import shutil
import subprocess
import tempfile
@contextmanager
def captured_output():
@ -46,86 +42,3 @@ def get_file_magic(filename):
finally:
ms.close()
return details
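# create_git_repo() below builds the repository with two tagged commits
# (v1.0.0 and v1.1.0) plus a "custom-branch" branch; the test_results dict
# records which blueprint files are expected at each of those points.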
def create_git_repo():
"""Create a git repo in a tmpdir
Call this from setUpClass()
This returns the following fields:
* repodir - the directory holding the repository
* test_results - A dict with information to use for the tests
* first_commit - hash of the first commit
"""
repodir = tempfile.mkdtemp(prefix="git-rpm-test.")
# Create a local git repo in a temporary directory, populate it with files.
cmd = ["git", "init", repodir]
subprocess.check_call(cmd)
oldcwd = os.getcwd()
os.chdir(repodir)
cmd = ["git", "config", "user.email", "test@testing.localhost"]
subprocess.check_call(cmd)
# Hold the expected file paths for the tests
test_results = {"first": [], "second": [], "branch": []}
# Add some files
results_path = "./tests/pylorax/results/"
for f in ["full-recipe.toml", "minimal.toml", "modules-only.toml"]:
shutil.copy2(os.path.join(oldcwd, results_path, f), repodir)
test_results["first"].append(f)
cmd = ["git", "add", "*.toml"]
subprocess.check_call(cmd)
cmd = ["git", "commit", "-m", "first files"]
subprocess.check_call(cmd)
cmd = ["git", "tag", "v1.0.0"]
subprocess.check_call(cmd)
# Get the commit hash
cmd = ["git", "log", "--pretty=%H"]
first_commit = subprocess.check_output(cmd).decode("UTF-8").strip()
# 2nd commit adds to 1st commit
test_results["second"] = test_results["first"].copy()
# Add some more files
os.makedirs(os.path.join(repodir, "only-bps/"))
for f in ["packages-only.toml", "groups-only.toml"]:
shutil.copy2(os.path.join(oldcwd, results_path, f), os.path.join(repodir, "only-bps/"))
test_results["second"].append(os.path.join("only-bps/", f))
# Add a dotfile as well
with open(os.path.join(repodir, "only-bps/.bpsrc"), "w") as f:
f.write("dotfile test\n")
test_results["second"].append("only-bps/.bpsrc")
test_results["second"] = sorted(test_results["second"])
cmd = ["git", "add", "*.toml", "only-bps/.bpsrc"]
subprocess.check_call(cmd)
cmd = ["git", "commit", "-m", "second files"]
subprocess.check_call(cmd)
cmd = ["git", "tag", "v1.1.0"]
subprocess.check_call(cmd)
# Make a branch for some other files
cmd = ["git", "checkout", "-b", "custom-branch"]
subprocess.check_call(cmd)
# 3rd commit adds to 2nd commit
test_results["branch"] = test_results["second"].copy()
# Add some files to the new branch
for f in ["custom-base.toml", "repos-git.toml"]:
shutil.copy2(os.path.join(oldcwd, results_path, f), repodir)
test_results["branch"].append(f)
test_results["branch"] = sorted(test_results["branch"])
cmd = ["git", "add", "*.toml"]
subprocess.check_call(cmd)
cmd = ["git", "commit", "-m", "branch files"]
subprocess.check_call(cmd)
os.chdir(oldcwd)
return (repodir, test_results, first_commit)

View File

@ -5,7 +5,7 @@ set -eu
[ "$(id -u)" -eq 0 ] || (echo "$0 must be run as root"; exit 1)
BEAKERLIB_DIR=$(mktemp -d /tmp/composer-test.XXXXXX)
BEAKERLIB_DIR=$(mktemp -d /tmp/mkksiso-test.XXXXXX)
export BEAKERLIB_DIR
CLI="${CLI:-}"

View File

@ -10,7 +10,6 @@ class LoraxLintConfig(PocketLintConfig):
self.falsePositives = [ FalsePositive(r"Module 'pylorax' has no 'version' member"),
FalsePositive(r"Catching too general exception Exception"),
FalsePositive(r"Module 'composer' has no 'version' member"),
# See https://bugzilla.redhat.com/show_bug.cgi?id=1739167
FalsePositive(r"Module 'rpm' has no '.*' member"),
FalsePositive(r"raise-missing-from"),

View File

@ -1,111 +0,0 @@
#!/bin/bash
# Note: execute this file from the project root directory
# Note: Use test/check-cli && test/check-cloud if you want to
# execute test scenarios by hand!
set -eu
. $(dirname $0)/cli/lib/lib.sh
CLI="${CLI:-}"
function setup_tests {
[ "$BACKEND" == "osbuild-composer" ] && return 0
local share_dir=$1
# explicitly enable sshd for live-iso b/c it is disabled by default
# due to security concerns (no root password required)
sed -i.orig 's/^services.*/services --disabled="network" --enabled="NetworkManager,sshd"/' $share_dir/composer/live-iso.ks
# Make the live-iso boot more quickly (isolinux.cfg)
for cfg in "$share_dir"/templates.d/99-generic/live/config_files/*/isolinux.cfg; do
sed -i.orig 's/^timeout.*/timeout 20/' "$cfg"
done
# Make the live-iso boot more quickly (grub.cfg)
for cfg in "$share_dir"/templates.d/99-generic/live/config_files/*/grub.conf; do
sed -i.orig 's/^timeout.*/timeout 2/' "$cfg"
done
# Make the live-iso boot more quickly (grub2-efi.cfg)
for cfg in "$share_dir"/templates.d/99-generic/live/config_files/*/grub2-efi.cfg; do
sed -i.orig 's/^set timeout.*/set timeout=2/' "$cfg"
done
# explicitly enable logging in with empty passwords via ssh, because
# the default sshd setting for PermitEmptyPasswords is 'no'
awk -i inplace "
/%post/ && FLAG != 2 {FLAG=1}
/%end/ && FLAG == 1 {print \"sed -i 's/.*PermitEmptyPasswords.*/PermitEmptyPasswords yes/' /etc/ssh/sshd_config\"; FLAG=2}
{print}" \
$share_dir/composer/live-iso.ks
}
function teardown_tests {
[ "$BACKEND" == "osbuild-composer" ] && return 0
local share_dir=$1
mv $share_dir/composer/live-iso.ks.orig $share_dir/composer/live-iso.ks
# Restore all the configuration files
for cfg in "$share_dir"/templates.d/99-generic/live/config_files/*/*.orig; do
mv "$cfg" "${cfg%%.orig}"
done
}
# cloud credentials
if [ -f "~/.config/lorax-test-env" ]; then
. ~/.config/lorax-test-env
fi
if [ -f "/var/tmp/lorax-test-env" ]; then
. /var/tmp/lorax-test-env
fi
if [ -z "$CLI" ]; then
export top_srcdir=`pwd`
. ./tests/testenv.sh
export BLUEPRINTS_DIR=`mktemp -d '/tmp/composer-blueprints.XXXXX'`
cp ./tests/pylorax/blueprints/*.toml $BLUEPRINTS_DIR
export SHARE_DIR=`mktemp -d '/tmp/composer-share.XXXXX'`
cp -R ./share/* $SHARE_DIR
chmod a+rx -R $SHARE_DIR
setup_tests $SHARE_DIR
# start the backend daemon
composer_start
else
export PACKAGE="composer-cli"
export BLUEPRINTS_DIR="/var/lib/lorax/composer/blueprints"
composer_stop
setup_tests /usr/share/lorax
composer_start
fi
# Clean out the test-results directory
if [ -e "/var/tmp/test-results" ]; then
rm -rf "/var/tmp/test-results"
fi
setup_beakerlib_env
run_beakerlib_tests "$@"
if [ -z "$CLI" ]; then
# stop backend and remove /run/weldr/api.socket
# only if running against source
composer_stop
teardown_tests $SHARE_DIR
else
composer_stop
teardown_tests /usr/share/lorax
# start backend again so we can continue with manual or other kinds
# of testing on the same system
composer_start
fi
parse_beakerlib_results