mirror of https://pagure.io/fedora-qa/createhdds.git
Commit b54aed6aa1
The basic approach is that openqa_trigger gets a ValidationEvent from python-wikitcms - either the Wiki.current_event property for 'current', or the event specified, obtained via the newly-added Wiki.get_validation_event(), for 'event'. For 'event' it then just goes ahead, runs the jobs and prints the IDs. For 'current' it checks the last-run compose version for each arch and runs if needed, as before. The ValidationEvent's 'sortname' property is the value written out to PERSISTENT to track the 'last run' - this property is intended to always sort compose events 'correctly', so we should always run when appropriate, even when going from Rawhide to Branched, Branched to a TC, TC to RC, or RC to (next milestone) TC.

On both paths it gets a fedfind.Release object via the ValidationEvent - ValidationEvents have a ff_release property which is the fedfind.Release object that matches that event. It then queries fedfind for image locations, using a query that tries to get just *one* generic-ish network install image for each arch. It passes the location to download_image(), which is just download_rawhide_iso() renamed and does the same job, only it can be simpler now. From there it works pretty much as before, except we use the ValidationEvent's 'version' property as the BUILD setting for OpenQA, and report_job_results' get_relval_commands() is tweaked slightly to parse this properly to produce a correct report-auto command.

Probably the most likely bits to break here are the sortname thing (see wikitcms helpers.py fedora_release_sort(); it's pretty stupid, I should re-write it) and the image query, which might wind up getting more than one image depending on how exactly the F22 Alpha composes look. I'll keep a close eye on that. We can always take the list from fedfind and further filter it so we have just one image per arch; Image objects have a .arch attribute, so this will be easy to do if necessary. I *could* give the fedfind query code an 'I'm feeling lucky'-ish mode to only return one image per (whatever), but I'm not sure if that would be too specialized; I'll think about it.
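As an illustration of that fallback, a per-arch filter over the fedfind image list could look roughly like this (a sketch only, not part of this commit; it assumes nothing beyond the .arch attribute mentioned above, and one_image_per_arch is a hypothetical helper name):

def one_image_per_arch(images):
    """Sketch of a hypothetical helper: keep only the first image fedfind
    returns for each arch, dropping any extras."""
    picked = {}
    for image in images:
        # Image objects have a .arch attribute, so use it to deduplicate.
        picked.setdefault(image.arch, image)
    return list(picked.values())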
231 lines · 8.7 KiB · Python · Executable File
#!/usr/bin/env python

import json
import urllib2
import re
import urlgrabber
import os.path
import sys
import subprocess
import argparse
# We can at least find images and run OpenQA jobs without wikitcms
try:
    import wikitcms.wiki
except ImportError:
    pass
import fedfind.release

from report_job_results import report_results

PERSISTENT = "/var/tmp/openqa_watcher.json"
ISO_PATH = "/var/lib/openqa/factory/iso/"
RUN_COMMAND = "/var/lib/openqa/script/client isos post ISO=%s DISTRI=fedora VERSION=rawhide FLAVOR=server ARCH=%s BUILD=%s"
VERSIONS = ['i386', 'x86_64']
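# NOTE: despite the name, VERSIONS holds the arches we run and track; the
# per-arch entries in PERSISTENT are keyed on these values.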

# read last tested version from file
def read_last():
    result = {}
    try:
        f = open(PERSISTENT, "r")
        json_raw = f.read()
        f.close()
        json_parsed = json.loads(json_raw)
    except IOError:
        return result, {}

    for version in VERSIONS:
        result[version] = json_parsed.get(version, None)
    return result, json_parsed

def download_image(image):
    """Download a given image with a name that should be unique for
    this event and arch (until we start testing different images
    for the same event and arch). Returns the filename of the image
    (not the path).
    """
    isoname = "{0}_{1}.iso".format(image.version.replace(' ', '_'), image.arch)
    filename = os.path.join(ISO_PATH, isoname)
    if not os.path.isfile(filename):
        # Icky hack around a urlgrabber bug:
        # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=715416
        urlgrabber.urlgrab(image.url.replace('https', 'http'), filename)
    return isoname

def run_openqa_jobs(isoname, arch, image_version):
    """run OpenQA 'isos' job on selected isoname, with given arch
    and a version string. **NOTE**: the version is passed to OpenQA
    as BUILD and parsed back into the 'relval report-auto' arguments
    by report_job_results.py; it is expected to be in the form of a
    3-tuple on which join('_') has been run, and the three elements
    will be passed as --release, --compose and --milestone. Returns
    a list of job IDs.
    """
    command = RUN_COMMAND % (isoname, arch, image_version)

    # starts OpenQA jobs
    output = subprocess.check_output(command.split())

    # read ids from OpenQA to wait for
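    # (the 'isos post' output is expected to contain something like
    # "ids => [1234 .. 1239]" for a range of jobs, or "ids => [1234]"
    # for a single job)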
    r = re.compile(r'ids => \[(?P<from>\d+)( \.\. (?P<to>\d+))?\]')
    match = r.search(output)
    if match and match.group('to'):
        from_i = int(match.group('from'))
        to_i = int(match.group('to')) + 1
        return range(from_i, to_i)
    elif match:
        return [int(match.group('from'))]
    else:
        return []

# run OpenQA on the current compose, if it is newer than the version from the
# last run
def run_current(args, wiki):
    if not wiki:
        sys.exit("python-wikitcms is required for 'current'. Try 'compose' "
                 "to run against today's Rawhide nightly without wiki "
                 "result submission.")
    last_versions, json_parsed = read_last()
    currev = wiki.current_event
    print("Current event: {0}".format(currev.version))
    runarches = []
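    # currev.sortname is wikitcms' sortable version string, so this comparison
    # should stay correct across Rawhide -> Branched -> TC -> RC transitions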
    for arch in VERSIONS:
        last_version = last_versions.get(arch, None)
        if last_version and last_version >= currev.sortname:
            print("Skipped: {0}".format(arch))
        else:
            runarches.append(arch)
            json_parsed[arch] = currev.sortname
    jobs = jobs_from_fedfind(currev.ff_release, runarches)

    # write info about latest versions
    f = open(PERSISTENT, "w")
    f.write(json.dumps(json_parsed))
    f.close()

    # wait for jobs to finish and display results
    print(jobs)
    report_results(jobs)
    sys.exit()

def run_compose(args, wiki=None):
    """run OpenQA on a specified compose, optionally reporting results
    if a matching wikitcms ValidationEvent can be found.
    """
    # get the fedfind release object
    try:
        ff_release = fedfind.release.get_release(
            release=args.release, milestone=args.milestone,
            compose=args.compose)
    except ValueError as err:
        sys.exit(err[0])

    if args.submit_results:
        try:
            # sanity check: there's...some voodoo in here, but this isn't
            # really strictly necessary, and we don't use the event object
            # for anything.
            event = wiki.get_validation_event(
                release=ff_release.release, milestone=ff_release.milestone,
                compose=ff_release.compose)
            evff = event.ff_release
            if evff.version != ff_release.version:
                print("Release validation event's fedfind object does not "
                      "match the one from fedfind's get_release(). Something's"
                      " wrong somewhere. Result submission disabled.")
                args.submit_results = False
        except ValueError:
            print("Warning: could not find a validation test event for this "
                  "compose. Continuing with OpenQA jobs, but results will "
                  "not be submitted to the wiki.")
            args.submit_results = False

    print("Running on compose: {0}".format(ff_release.version))
    if args.arch:
        jobs = jobs_from_fedfind(ff_release, [args.arch])
    else:
        jobs = jobs_from_fedfind(ff_release)
    print(jobs)
    if args.submit_results:
        report_results(jobs)
    sys.exit()

def jobs_from_fedfind(ff_release, arches=VERSIONS):
    """Given a fedfind.Release object, find the ISOs we want and run
    jobs on them. arches is an iterable of arches to run on; if not
    specified, we'll use our constant.
    """
    # Find boot.iso images for our arches; the third query is a bit of a
    # bodge till I know what 22 TCs/RCs will actually look like. Ideally
    # we want a query that will reliably return one image per arch
    # without us having to filter further, but we can always just take
    # the first image for each arch if necessary.
    jobs = []
    queries = (
        fedfind.release.Query('imagetype', ('boot',)),
        fedfind.release.Query('arch', arches),
        fedfind.release.Query('payload', ('server', 'generic')))

    for image in ff_release.find_images(queries):
        print("{0} {1}".format(image.url, image.desc))
        isoname = download_image(image)
        version = '_'.join(
            (ff_release.release, ff_release.milestone, ff_release.compose))
        job_ids = run_openqa_jobs(isoname, image.arch, version)
        jobs.extend(job_ids)
    return jobs
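
# Typical invocations (illustrative; assumes this file is installed as the
# openqa_trigger script described in the commit message above):
#   openqa_trigger current
#   openqa_trigger compose -r 22 -m Alpha -c TC1 --submit-results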

if __name__ == "__main__":
    test_help = "Operate on the staging wiki (for testing)"
    parser = argparse.ArgumentParser(description=(
        "Run OpenQA tests for a release validation test event."))
    subparsers = parser.add_subparsers()

    parser_current = subparsers.add_parser(
        'current', description="Run for the current event, if needed.")
    parser_current.add_argument(
        '-t', '--test', help=test_help, required=False, action='store_true')
    parser_current.set_defaults(func=run_current)

    parser_compose = subparsers.add_parser(
        'compose', description="Run for a specific compose (TC/RC or nightly)."
        " If a matching release validation test event can be found and "
        "--submit-results is passed, results will be reported.")
    parser_compose.add_argument(
        '-r', '--release', type=int, required=False, choices=range(12, 100),
        metavar="12-99", help="Release number of a specific compose to run "
        "against. Must be passed for validation event discovery to succeed.")
    parser_compose.add_argument(
        '-m', '--milestone', help="The milestone to operate on (Alpha, Beta, "
        "Final, Branched, Rawhide). Must be specified for a TC/RC; for a "
        "nightly it will be guessed if not specified", required=False,
        choices=['Alpha', 'Beta', 'Final', 'Branched', 'Rawhide'])
    parser_compose.add_argument(
        '-c', '--compose', help="The version to run for; either the compose "
        "(for a TC/RC), or the date (for a nightly build)", required=False,
        metavar="{T,R}C1-19 or YYYYMMDD")
    parser_compose.add_argument(
        '-a', '--arch', help="The arch to run for", required=False,
        choices=('x86_64', 'i386'))
    parser_compose.add_argument(
        '-s', '--submit-results', help="Submit the results to the release "
        "validation event for this compose, if possible", required=False,
        action='store_true')
    parser_compose.add_argument(
        '-t', '--test', help=test_help, required=False, action='store_true')
    parser_compose.set_defaults(func=run_compose)

    args = parser.parse_args()

    wiki = None
    if args.test:
        try:
            wiki = wikitcms.wiki.Wiki(('https', 'stg.fedoraproject.org'),
                                      '/w/')
        except NameError:
            pass
    else:
        try:
            wiki = wikitcms.wiki.Wiki(('https', 'fedoraproject.org'), '/w/')
        except NameError:
            pass
    args.func(args, wiki)