Mirror of https://pagure.io/fedora-qa/createhdds.git, synced 2024-11-28 17:13:08 +00:00
modularize openqa_trigger -> fedora_openqa_schedule (T541)
Summary:
This converts openqa_trigger into a fedora_openqa_schedule package which is properly modularized: there's a CLI module, a schedule module, a report module, and for now conf_test_suites is its own module (though I think it's kind of ugly and we should turn it into a JSON file or something). ISO file download location configuration is now done with an optional config file, as with the splits it becomes a mess to try and pass it through from the CLI args. This also means custom ISO locations will be respected by other things we write which use the 'schedule' module. This includes a setup.py so the package and fedora-openqa-schedule command can be installed systemwide. We could now extend this to install stuff like the systemd services and little scripts like run-nightly.sh.

Test Plan:
Check that things work more or less as before. New CLI command is 'fedora-openqa-schedule'; it has the 'current' and 'compose' sub-commands, plus a new 'report' sub-command which works like calling report-job-results.py directly used to. Check that installing systemwide works properly. Check that ISO download location configuration works as expected. Running './fedora-openqa-schedule' from within the git checkout should also work.

Reviewers: garretraziel, jskladan

Reviewed By: garretraziel, jskladan

Maniphest Tasks: T541

Differential Revision: https://phab.qadevel.cloud.fedoraproject.org/D547
This commit is contained in:
parent 2c09f465e1
commit 86999701d3
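The optional ISO-location config file described in the summary is not itself shown in this diff. A minimal sketch of how the 'schedule' module could read it, assuming an INI-style file at /etc/fedora-openqa/schedule.conf with an [iso] section and a 'directory' key (the file path, section and key names are all assumptions, not part of this commit):

# Hypothetical sketch only: fall back to the old hard-coded default
# (ISO_PATH in the deleted openqa_trigger.py below) when no config exists.
try:
    from configparser import ConfigParser  # Python 3
except ImportError:
    from ConfigParser import ConfigParser  # Python 2, matching this code base

def get_iso_path(default="/var/lib/openqa/factory/iso/"):
    config = ConfigParser()
    # read() silently skips files that do not exist, so the config stays optional
    config.read(["/etc/fedora-openqa/schedule.conf"])
    if config.has_section("iso") and config.has_option("iso", "directory"):
        return config.get("iso", "directory")
    return default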
@@ -1,335 +0,0 @@ (deleted file: conf_test_suites.py)
def default_install_cb(flavor):
    """Figure out the correct test case name for a
    default_boot_and_install pass for a given flavor.
    """
    (payload, imagetype) = flavor.split('_')
    imagetype = imagetype.replace('boot', 'netinst')
    imagetype = imagetype.replace('dvd', 'offline')
    return "{0} {1}".format(payload, imagetype)


TESTCASES = {
    "QA:Testcase_Boot_default_install": {
        "name_cb": default_install_cb,
        "section": 'Default boot and install',
        "env": "$RUNARCH$",
        "type": "Installation",
    },
    "QA:Testcase_install_to_VirtIO": {
        "section": "Storage devices",
        "env": "x86",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_guided_empty": {
        "section": "Guided storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_Anaconda_User_Interface_Graphical": {
        "section": "User interface",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_Anaconda_user_creation": {
        "section": "Miscellaneous",
        "env": "x86",
        "type": "Installation",
    },
    "QA:Testcase_install_to_PATA": {
        "section": "Storage devices",
        "env": "x86",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_guided_delete_all": {
        "section": "Guided storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_install_to_SATA": {
        "section": "Storage devices",
        "env": "x86",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_guided_multi_select": {
        "section": "Guided storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_install_to_SCSI": {
        "section": "Storage devices",
        "env": "x86",
        "type": "Installation",
    },
    "QA:Testcase_Anaconda_updates.img_via_URL": {
        "section": "Miscellaneous",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_kickstart_user_creation": {
        "section": "Kickstart",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_Kickstart_Http_Server_Ks_Cfg": {
        "section": "Kickstart",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_install_repository_Mirrorlist_graphical": {
        "section": "Installation repositories",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_install_repository_HTTP/FTP_graphical": {
        "section": "Installation repositories",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_install_repository_HTTP/FTP_variation": {
        "section": "Installation repositories",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_Package_Sets_Minimal_Package_Install": {
        "section": "Package sets",
        "env": "$RUNARCH$",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_guided_encrypted": {
        "section": "Guided storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_guided_delete_partial": {
        "section": "Guided storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_guided_free_space": {
        "section": "Guided storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_guided_multi_empty_all": {
        "section": "Guided storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_custom_software_RAID": {
        "section": "Custom storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_custom_btrfs": {
        "section": "Custom storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_custom_lvmthin": {
        "section": "Custom storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_custom_standard_partition_ext3": {
        "section": "Custom storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_partitioning_custom_no_swap": {
        "section": "Custom storage configuration",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_Kickstart_Hd_Device_Path_Ks_Cfg": {
        "section": "Kickstart",
        "env": "Result",
        "type": "Installation",
    },
    "QA:Testcase_upgrade_fedup_cli_previous_minimal": {
        "section": "Upgrade",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_upgrade_fedup_cli_previous_workstation": {
        "section": "Upgrade",
        "env": "x86 BIOS",
        "type": "Installation",
    },
    "QA:Testcase_Anaconda_updates.img_via_local_media": {
        "section": "Miscellaneous",
        "env": "Result",
        "type": "Installation",
    },
    # "": {
    #     "name_cb": callbackfunc  # optional, called with 'flavor'
    #     "section": "",
    #     "env": "x86",
    #     "type": "Installation",
    # },
}


TESTSUITES = {
    "default_install": [
        "QA:Testcase_Boot_default_install",
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
    ],
    "package_set_minimal": [
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_delete_pata": [
        "QA:Testcase_install_to_PATA",
        "QA:Testcase_partitioning_guided_delete_all",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_sata_multi": [
        "QA:Testcase_install_to_SATA",
        "QA:Testcase_partitioning_guided_multi_select",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_scsi_updates_img": [
        "QA:Testcase_install_to_SCSI",
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_Anaconda_updates.img_via_URL",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_kickstart_user_creation": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_kickstart_user_creation",
        "QA:Testcase_Kickstart_Http_Server_Ks_Cfg",
    ],
    "server_mirrorlist_graphical": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_install_repository_Mirrorlist_graphical",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_repository_http_graphical": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_install_repository_HTTP/FTP_graphical",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_repository_http_variation": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_install_repository_HTTP/FTP_variation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_mirrorlist_http_variation": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_install_repository_HTTP/FTP_variation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_simple_encrypted": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_empty",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
        "QA:Testcase_partitioning_guided_encrypted",
    ],
    "server_delete_partial": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_delete_partial",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_simple_free_space": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_free_space",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_multi_empty": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_guided_multi_empty_all",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_software_raid": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_custom_software_RAID",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_kickstart_hdd": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_kickstart_user_creation",
        "QA:Testcase_Kickstart_Hd_Device_Path_Ks_Cfg",
    ],
    "fedup_minimal_64bit": [
        "QA:Testcase_upgrade_fedup_cli_previous_minimal",
    ],
    "fedup_desktop_64bit": [
        "QA:Testcase_upgrade_fedup_cli_previous_workstation",
    ],
    "fedup_minimal_32bit": [
        "QA:Testcase_upgrade_fedup_cli_previous_minimal",
    ],
    "fedup_desktop_32bit": [
        "QA:Testcase_upgrade_fedup_cli_previous_workstation",
    ],
    "server_btrfs": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_custom_btrfs",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_lvmthin": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_custom_lvmthin",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_ext3": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_custom_standard_partition_ext3",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_updates_img_local": [
        "QA:Testcase_Anaconda_updates.img_via_local_media",
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
    "server_no_swap": [
        "QA:Testcase_install_to_VirtIO",
        "QA:Testcase_partitioning_custom_no_swap",
        "QA:Testcase_Anaconda_User_Interface_Graphical",
        "QA:Testcase_Anaconda_user_creation",
        "QA:Testcase_Package_Sets_Minimal_Package_Install",
    ],
}
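The name_cb hook is the only dynamic piece of TESTCASES; from the string handling in default_install_cb above, a few example mappings from openQA flavor to wiki test name (the flavor strings are illustrative):

# Examples derived directly from default_install_cb:
default_install_cb('server_boot')        # -> 'server netinst'
default_install_cb('workstation_dvd')    # -> 'workstation offline'
default_install_cb('generic_boot')       # -> 'generic netinst'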
@@ -1,363 +0,0 @@ (deleted file: openqa_trigger.py)
#!/usr/bin/env python

import json
import urlgrabber
import os.path
import sys
import argparse
import datetime
import logging
import time
# We can at least find images and run OpenQA jobs without wikitcms
try:
    import wikitcms.wiki
except ImportError:
    wikitcms = None
import fedfind.exceptions
import fedfind.release

from openqa_client.client import OpenQA_Client
from report_job_results import report_results

PERSISTENT = "/var/tmp/openqa_watcher.json"
ISO_PATH = "/var/lib/openqa/factory/iso/"
ARCHES = ['x86_64', 'i386']


class TriggerException(Exception):
    pass


# read last tested version from file
def read_last():
    logging.debug("reading latest checked version from %s", PERSISTENT)
    result = {}
    try:
        f = open(PERSISTENT, "r")
        json_raw = f.read()
        f.close()
        json_parsed = json.loads(json_raw)
    except IOError:
        logging.warning("cannot read file %s", PERSISTENT)
        return result, {}

    for arch in ARCHES:
        result[arch] = json_parsed.get(arch, None)
        logging.info("latest version for %s: %s", arch, result[arch])
    return result, json_parsed


def download_image(image):
    """Download a given image with a name that should be unique.
    Returns the filename of the image (not the path).
    """
    ver = image.version.replace(' ', '_')
    if image.imagetype == 'boot':
        isoname = "{0}_{1}_{2}_boot.iso".format(ver, image.payload, image.arch)
    else:
        isoname = "{0}_{1}".format(ver, image.filename)
    filename = os.path.join(ISO_PATH, isoname)
    if not os.path.isfile(filename):
        logging.info("downloading %s (%s) to %s", image.url, image.desc, filename)
        # Icky hack around a urlgrabber bug:
        # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=715416
        urlgrabber.urlgrab(image.url.replace('https', 'http'), filename)
    else:
        logging.info("%s already exists", filename)
    return isoname


def run_openqa_jobs(client, isoname, flavor, arch, build):
    """Run an OpenQA 'isos' job on the selected isoname, with the
    given arch and a version string. **NOTE**: the version is passed
    to OpenQA as BUILD and is parsed back into the 'relval
    report-auto' arguments by report_job_results.py; it is expected
    to be in the form of a 3-tuple on which join('_') has been run,
    and the three elements will be passed as --release, --compose
    and --milestone. Returns a list of job IDs.
    """
    logging.info("sending jobs on OpenQA")

    # starts OpenQA jobs
    params = {
        'ISO': isoname,
        'DISTRI': 'fedora',
        'VERSION': build.split('_')[0],
        'FLAVOR': flavor,
        'ARCH': arch,
        'BUILD': build
    }
    output = client.openqa_request('POST', 'isos', params)

    logging.debug("executed")
    logging.info("planned jobs: %s", output["ids"])

    return output["ids"]


def jobs_from_current(wiki, client):
    """Schedule jobs against the 'current' release validation event
    (according to wikitcms) if we have not already. Returns the job
    list.
    """
    if not wiki:
        logging.warning("python-wikitcms is required for current validation event discovery.")
        return ([], None)
    last_versions, json_parsed = read_last()
    currev = wiki.current_event
    logging.info("current event: %s", currev.version)
    runarches = []
    for arch in ARCHES:
        last_version = last_versions.get(arch, None)
        if last_version and last_version >= currev.sortname:
            logging.info("skipped: %s: %s is newer or equal to %s",
                         arch, last_version, currev.sortname)
        else:
            runarches.append(arch)
            logging.debug("%s will be tested in version %s", arch, currev.sortname)
            json_parsed[arch] = currev.sortname

    jobs = []

    if not runarches:
        raise TriggerException("Skipped all arches, nothing to do.")

    jobs = jobs_from_fedfind(currev.ff_release, client, runarches)
    logging.info("planned jobs: %s", ' '.join(str(j) for j in jobs))

    # write info about latest versions
    f = open(PERSISTENT, "w")
    f.write(json.dumps(json_parsed))
    f.close()
    logging.debug("written info about newest version")

    return jobs


def jobs_from_fedfind(ff_release, client, arches=ARCHES):
    """Given a fedfind.Release object, find the ISOs we want and run
    jobs on them. arches is an iterable of arches to run on; if not
    specified, we'll use our constant.
    """
    # Find currently-testable images for our arches.
    jobs = []
    queries = (
        fedfind.release.Query('imagetype', ('boot', 'live')),
        fedfind.release.Query('arch', arches),
        fedfind.release.Query('payload', ('server', 'generic', 'workstation')))
    logging.debug("querying fedfind for images")
    images = ff_release.find_images(queries)

    if len(images) == 0:
        raise TriggerException("no available images")

    # Now schedule jobs. First, let's get the BUILD value for openQA.
    build = '_'.join((ff_release.release, ff_release.milestone, ff_release.compose))

    # Next let's schedule the 'universal' tests.
    # We have different images in different composes: nightlies only
    # have a generic boot.iso, TC/RC builds have Server netinst/boot
    # and DVD. We always want to run *some* tests -
    # default_boot_and_install at least - for all images we find, then
    # we want to run all the tests that are not image-dependent on
    # just one image. So we have a special 'universal' flavor and
    # product in openQA; all the image-independent test suites run for
    # that product. Here, we find the 'best' image we can for the
    # compose we're running on (a DVD if possible, a boot.iso or
    # netinst if not), and schedule the 'universal' jobs on that
    # image.
    for arch in arches:
        okimgs = (img for img in images if img.arch == arch and
                  any(img.imagetype == okt for okt in ('dvd', 'boot', 'netinst')))
        bestscore = 0
        bestimg = None
        for img in okimgs:
            if img.imagetype == 'dvd':
                score = 10
            else:
                score = 1
            if img.payload == 'generic':
                score += 5
            elif img.payload == 'server':
                score += 3
            elif img.payload == 'workstation':
                score += 1
            if score > bestscore:
                bestimg = img
                bestscore = score
        if not bestimg:
            logging.warn("no universal tests image found for %s", arch)
            continue
        logging.info("running universal tests for %s with %s", arch, bestimg.desc)
        isoname = download_image(bestimg)
        job_ids = run_openqa_jobs(client, isoname, 'universal', arch, build)
        jobs.extend(job_ids)

    # Now schedule per-image jobs.
    for image in images:
        isoname = download_image(image)
        flavor = '_'.join((image.payload, image.imagetype))
        job_ids = run_openqa_jobs(client, isoname, flavor, image.arch, build)
        jobs.extend(job_ids)
    return jobs


# SUB-COMMAND FUNCTIONS

def run_current(args, client, wiki):
    """Run OpenQA for the current release validation event, if we
    have not already done so.
    """
    logging.info("running on current release")
    try:
        jobs = jobs_from_current(wiki, client)
    except TriggerException as e:
        logging.debug("No jobs run: %s", e)
        sys.exit(1)
    # wait for jobs to finish and display results
    if jobs:
        logging.info("waiting for jobs: %s", ' '.join(str(j) for j in jobs))
        report_results(jobs, client)
    logging.debug("finished")
    sys.exit()


def run_compose(args, client, wiki=None):
    """Run OpenQA on a specified compose, optionally reporting
    results if a matching wikitcms ValidationEvent is found by
    relval/wikitcms.
    """
    # get the fedfind release object
    try:
        logging.debug("querying fedfind on specific compose: %s %s %s", args.release,
                      args.milestone, args.compose)
        ff_release = fedfind.release.get_release(release=args.release, milestone=args.milestone,
                                                 compose=args.compose)
    except ValueError as err:
        logging.critical("compose %s %s %s was not found", args.release, args.milestone,
                         args.compose)
        sys.exit(err[0])

    logging.info("running on compose: %s", ff_release.version)

    if args.ifnotcurrent:
        try:
            currev = wiki.current_event
            # Compare currev's fedfind release version with ours
            if currev.ff_release.version == ff_release.version:
                logging.info("Compose is the current validation compose. Exiting.")
                sys.exit()
        except AttributeError:
            sys.exit("Wikitcms is required for --ifnotcurrent.")

    if args.wait:
        logging.info("Waiting up to %s mins for compose", str(args.wait))
        try:
            ff_release.wait(waittime=args.wait)
        except fedfind.exceptions.WaitError:
            sys.exit("Waited too long for compose to appear!")

    jobs = []
    try:
        if args.arch:
            jobs = jobs_from_fedfind(ff_release, client, [args.arch])
        else:
            jobs = jobs_from_fedfind(ff_release, client)
    except TriggerException as e:
        logging.debug("No jobs run: %s", e)
        sys.exit(1)
    logging.info("planned jobs: %s", ' '.join(str(j) for j in jobs))
    if args.submit_results:
        report_results(jobs, client)
    logging.debug("finished")
    sys.exit()


if __name__ == "__main__":
    test_help = "Operate on the staging wiki (for testing)"
    parser = argparse.ArgumentParser(description=(
        "Run OpenQA tests for a release validation test event."))
    subparsers = parser.add_subparsers()

    parser_current = subparsers.add_parser(
        'current', description="Run for the current event, if needed.")
    parser_current.set_defaults(func=run_current)

    parser_compose = subparsers.add_parser(
        'compose', description="Run for a specific compose (TC/RC or nightly)."
        " If a matching release validation test event can be found and "
        "--submit-results is passed, results will be reported.")
    parser_compose.add_argument(
        '-r', '--release', type=int, required=False, choices=range(12, 100),
        metavar="12-99", help="Release number of a specific compose to run "
        "against. Must be passed for validation event discovery to succeed.")
    parser_compose.add_argument(
        '-m', '--milestone', help="The milestone to operate on (Alpha, Beta, "
        "Final, Branched, Rawhide). Must be specified for a TC/RC; for a "
        "nightly, will be guessed if not specified", required=False,
        choices=['Alpha', 'Beta', 'Final', 'Branched', 'Rawhide'])
    parser_compose.add_argument(
        '-c', '--compose', help="The version to run for; either the compose "
        "(for a TC/RC), or the date (for a nightly build)", required=False,
        metavar="{T,R}C1-19 or YYYYMMDD")
    parser_compose.add_argument(
        '-a', '--arch', help="The arch to run for", required=False,
        choices=('x86_64', 'i386'))
    parser_compose.add_argument(
        '-s', '--submit-results', help="Submit the results to the release "
        "validation event for this compose, if possible", required=False,
        action='store_true')
    parser_compose.add_argument(
        '-w', '--wait', help="Wait NN minutes for the compose to appear, if "
        "it doesn't yet exist", type=int, metavar="NN", default=0,
        required=False)
    parser_compose.add_argument(
        '-i', '--ifnotcurrent', help="Only run if the compose is not the "
        "'current' validation compose. Mainly intended for cron runs on "
        "nightly builds, to avoid duplicating jobs run by a 'current' "
        "cron job. Requires wikitcms", action='store_true')
    parser_compose.set_defaults(func=run_compose)

    parser.add_argument(
        '-t', '--test', help=test_help, required=False, action='store_true')
    parser.add_argument(
        '-f', '--log-file', help="If given, log into specified file. When not provided, stdout"
        " is used", required=False)
    parser.add_argument(
        '-l', '--log-level', help="Specify log level to be outputted", required=False)
    parser.add_argument('-i', '--iso-directory', help="Directory for downloading isos, default"
                        " is %s" % PERSISTENT, required=False)

    args = parser.parse_args()

    if args.log_level:
        log_level = getattr(logging, args.log_level.upper(), None)
        if not isinstance(log_level, int):
            log_level = logging.INFO
    else:
        log_level = logging.INFO
    if args.log_file:
        logging.basicConfig(format="%(levelname)s:%(name)s:%(asctime)s:%(message)s",
                            filename=args.log_file, level=log_level)
    else:
        logging.basicConfig(level=log_level)

    if args.iso_directory:
        ISO_PATH = args.iso_directory

    wiki = None
    if args.test:
        logging.debug("using test wiki")
        if wikitcms:
            wiki = wikitcms.wiki.Wiki(('https', 'stg.fedoraproject.org'), '/w/')
        else:
            logging.warn("wikitcms not found, reporting to wiki disabled")
    else:
        if wikitcms:
            wiki = wikitcms.wiki.Wiki(('https', 'fedoraproject.org'), '/w/')
        else:
            logging.warn("wikitcms not found, reporting to wiki disabled")

    client = OpenQA_Client()  # uses first server from ~/.config/openqa/client.conf

    args.func(args, client, wiki)
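The BUILD string documented in run_openqa_jobs is the contract between this script and report_job_results.py; a quick round-trip with illustrative values (this particular release/milestone/compose combination is made up) shows the expected shape:

# BUILD is a (release, milestone, compose) 3-tuple joined with '_':
build = '_'.join(('23', 'Branched', '20151110'))    # -> '23_Branched_20151110'
# report_job_results.py recovers the three elements the same way:
(release, milestone, compose) = build.split('_')
assert (release, milestone, compose) == ('23', 'Branched', '20151110')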
@@ -1,103 +0,0 @@ (deleted file: report_job_results.py)
import argparse
import sys
import time
import logging
import conf_test_suites

from operator import attrgetter
from openqa_client.client import OpenQA_Client
from wikitcms.wiki import Wiki, ResTuple

SLEEPTIME = 60
logger = logging.getLogger(__name__)


def get_passed_testcases(job_ids, client):
    """
    job_ids ~ list of int (job ids)
    Returns ~ sorted list of ResTuple instances for passed testcases
    """
    running_jobs = dict([(job_id, "jobs/%s" % job_id) for job_id in job_ids])
    logger.info("running jobs: %s", running_jobs)
    finished_jobs = {}

    while running_jobs:
        for job_id, url in running_jobs.items():
            output = client.openqa_request('GET', url)
            job_state = output['job']
            if job_state['state'] in ('done', 'cancelled'):
                logger.info("job %s is done", job_id)
                finished_jobs[job_id] = job_state
                del running_jobs[job_id]
        if running_jobs:
            time.sleep(SLEEPTIME)
    logger.info("all jobs finished")

    passed_testcases = set()
    for job_id in job_ids:
        job = finished_jobs[job_id]
        if job['result'] == 'passed':
            (release, milestone, compose) = job['settings']['BUILD'].split('_')
            testsuite = job['settings']['TEST']
            arch = job['settings']['ARCH']
            flavor = job['settings']['FLAVOR']

            for testcase in conf_test_suites.TESTSUITES[testsuite]:
                # each 'testsuite' is a list using testcase names to indicate which Wikitcms tests
                # have passed if this job passes. Each testcase name is the name of a dict in the
                # TESTCASES dict-of-dicts which more precisely identifies the 'test instance' (when
                # there is more than one for a testcase) and environment for which the result
                # should be filed.
                uniqueres = conf_test_suites.TESTCASES[testcase]
                testname = ''
                if 'name_cb' in uniqueres:
                    testname = uniqueres['name_cb'](flavor)
                env = arch if uniqueres['env'] == '$RUNARCH$' else uniqueres['env']
                result = ResTuple(
                    testtype=uniqueres['type'], release=release, milestone=milestone,
                    compose=compose, testcase=testcase, section=uniqueres['section'],
                    testname=testname, env=env, status='pass', bot=True)
                passed_testcases.add(result)

    return sorted(list(passed_testcases), key=attrgetter('testcase'))


def report_results(job_ids, client, verbose=False, report=True):
    passed_testcases = get_passed_testcases(job_ids, client)
    if verbose:
        for restup in passed_testcases:
            print restup
    logger.info("passed testcases: %s", passed_testcases)

    if report:
        if verbose:
            print "Reporting test passes:"
        logger.info("reporting test passes")
        wiki = Wiki()
        wiki.login()
        if not wiki.logged_in:
            logger.error("could not log in to wiki")
            sys.exit("Could not log in to wiki!")

        # Submit the results
        (insuffs, dupes) = wiki.report_validation_results(passed_testcases)
        for dupe in dupes:
            tmpl = "already reported result for test %s, env %s! Will not report dupe."
            if verbose:
                print tmpl % (dupe.testcase, dupe.env)
            logger.info(tmpl, dupe.testcase, dupe.env)

    else:
        if verbose:
            print "\n\n### No reporting is done! ###\n\n"
        logger.warning("no reporting is done")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Evaluate per-testcase results from OpenQA job "
                                     "runs")
    parser.add_argument('jobs', type=int, nargs='+')
    parser.add_argument('--report', default=False, action='store_true')

    args = parser.parse_args()
    client = OpenQA_Client()  # uses first server from ~/.config/openqa/client.conf
    report_results(args.jobs, client, verbose=True, report=args.report)
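report_results can also be driven from other code with an existing client, as openqa_trigger.py does above; a small sketch with placeholder job IDs (1001 and 1002 are made up), using report=False so nothing is submitted to the wiki:

from openqa_client.client import OpenQA_Client
from report_job_results import report_results

client = OpenQA_Client()  # uses first server from ~/.config/openqa/client.conf
report_results([1001, 1002], client, verbose=True, report=False)  # prints passes, no wiki edits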