1
0
mirror of https://pagure.io/fedora-qa/createhdds.git synced 2024-11-21 23:03:08 +00:00

Use python-wikitcms and fedfind

The basic approach is that openqa_trigger gets a ValidationEvent from
python-wikitcms - either the Wiki.current_event property for
'current', or the event specified, obtained via the newly-added
Wiki.get_validation_event(), for 'event'. For 'event' it then just
goes ahead and runs the jobs and prints the IDs. For 'current' it
checks the last run compose version for each arch and runs if needed,
as before. The ValidationEvent's 'sortname' property is the value
written out to PERSISTENT to track the 'last run' - this property is
intended to always sort compose events 'correctly', so we should
always run when appropriate even when going from Rawhide to Branched,
Branched to a TC, TC to RC, RC to (next milestone) TC.

On both paths it gets a fedfind.Release object via the ValidationEvent
- ValidationEvents have a ff_release property which is the
fedfind.Release object that matches that event. It then queries
fedfind for image locations using a query that tries to get just *one*
generic-ish network install image for each arch. It passes the
location to download_image(), which is just download_rawhide_iso()
renamed and does the same job, only it can be simpler now.

From there it works pretty much as before, except we use the
ValidationEvent's 'version' property as the BUILD setting for OpenQA,
and report_job_results' get_relval_commands() is tweaked slightly to
parse this properly to produce a correct report-auto command.

Probably the most likely bits to break here are the sortname thing
(see wikitcms helpers.py fedora_release_sort(), it's pretty stupid, I
should re-write it) and the image query, which might wind up getting
more than one image depending on how exactly the F22 Alpha composes
look. I'll keep a close eye on that. We can always take the list from
fedfind and further filter it so we have just one image per arch.
Image objects have a .arch attribute so this will be easy to do if
necessary. I *could* give the fedfind query code an 'I'm feeling lucky'-
ish mode to only return one image per (whatever), but not sure if that
would be too specialized, I'll think about it.
This commit is contained in:
Adam Williamson 2015-02-16 18:01:58 +01:00 committed by Josef Skladanka
parent 45e90cd076
commit b54aed6aa1
3 changed files with 186 additions and 79 deletions

View File

@ -1,10 +1,10 @@
TESTCASES = {
"QA:Testcase_Boot_default_install Server offline": {
"Server offline": {
"section": 'Default boot and install',
"env": "$RUNARCH$",
"type": "Installation",
},
"QA:Testcase_Boot_default_install Server netinst": {
"Server netinst": {
"section": 'Default boot and install',
"env": "$RUNARCH$",
"type": "Installation",
@ -16,7 +16,7 @@ TESTCASES = {
},
"QA:Testcase_partitioning_guided_empty": {
"section": "Guided storage configuration",
"env": "x86", # Probably a bug in relval - column name is "x86 BIOS", but there is a comment there just behind 'x86' which probably makes it strip the rest
"env": "x86 BIOS",
"type": "Installation",
},
"QA:Testcase_Anaconda_User_Interface_Graphical": {
@ -36,7 +36,7 @@ TESTCASES = {
},
"QA:Testcase_partitioning_guided_delete_all": {
"section": "Guided storage configuration",
"env": "x86", # Probably a bug in relval - column name is "x86 BIOS", but there is a comment there just behind 'x86' which probably makes it strip the rest
"env": "x86 BIOS",
"type": "Installation",
},
"QA:Testcase_install_to_SATA": {
@ -46,7 +46,7 @@ TESTCASES = {
},
"QA:Testcase_partitioning_guided_multi_select": {
"section": "Guided storage configuration",
"env": "x86", # Probably a bug in relval - column name is "x86 BIOS", but there is a comment there just behind 'x86' which probably makes it strip the rest
"env": "x86 BIOS",
"type": "Installation",
},
"QA:Testcase_install_to_SCSI": {
@ -71,17 +71,17 @@ TESTCASES = {
},
"QA:Testcase_install_repository_Mirrorlist_graphical": {
"section": "Installation repositories",
"env": "result",
"env": "Result",
"type": "Installation",
},
"QA:Testcase_install_repository_HTTP/FTP_graphical": {
"section": "Installation repositories",
"env": "result",
"env": "Result",
"type": "Installation",
},
"QA:Testcase_install_repository_HTTP/FTP_variation": {
"section": "Installation repositories",
"env": "result",
"env": "Result",
"type": "Installation",
},
"QA:Testcase_Package_Sets_Minimal_Package_Install": {
@ -91,12 +91,12 @@ TESTCASES = {
},
"QA:Testcase_partitioning_guided_encrypted": {
"section": "Guided storage configuration",
"env": "x86", # Probably a bug in relval - column name is "x86 BIOS", but there is a comment there just behind 'x86' which probably makes it strip the rest
"env": "x86 BIOS",
"type": "Installation",
},
"QA:Testcase_partitioning_guided_delete_partial": {
"section": "Guided storage configuration",
"env": "x86",
"env": "x86 BIOS",
"type": "Installation",
},
# "": {
@ -109,7 +109,7 @@ TESTCASES = {
TESTSUITES = {
"server_simple":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -117,7 +117,7 @@ TESTSUITES = {
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_delete_pata":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_PATA",
"QA:Testcase_partitioning_guided_delete_all",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -125,7 +125,7 @@ TESTSUITES = {
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_sata_multi":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_SATA",
"QA:Testcase_partitioning_guided_multi_select",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -133,7 +133,7 @@ TESTSUITES = {
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_scsi_updates_img":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_SCSI",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_updates.img_via_URL",
@ -148,7 +148,7 @@ TESTSUITES = {
"QA:Testcase_Kickstart_Http_Server_Ks_Cfg",
],
"server_mirrorlist_graphical":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -157,7 +157,7 @@ TESTSUITES = {
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_repository_http_graphical":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -166,7 +166,7 @@ TESTSUITES = {
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_repository_http_variation":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -175,7 +175,7 @@ TESTSUITES = {
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_mirrorlist_http_variation":[
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -184,7 +184,7 @@ TESTSUITES = {
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_simple_encrypted": [
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@ -193,9 +193,9 @@ TESTSUITES = {
"QA:Testcase_partitioning_guided_encrypted",
],
"server_delete_partial": [
"QA:Testcase_Boot_default_install Server netinst",
"Server netinst",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_delete_partial"
"QA:Testcase_partitioning_guided_delete_partial",
"QA:Testcase_Anaconda_User_Interface_Graphical",
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_Package_Sets_Minimal_Package_Install",

View File

@ -7,15 +7,19 @@ import urlgrabber
import os.path
import sys
import subprocess
import argparse
# We can at least find images and run OpenQA jobs without wikitcms
try:
import wikitcms.wiki
except:
pass
import fedfind.release
from report_job_results import report_results
PERSISTENT = "/var/tmp/openqa_watcher.json"
CURRENT_TEST = "https://fedoraproject.org/wiki/Test_Results:Current_Installation_Test"
ISO_URL = "https://kojipkgs.fedoraproject.org/mash/rawhide-%s/rawhide/%s/os/images/boot.iso"
ISO_REGEX = re.compile(r'https://kojipkgs\.fedoraproject\.org/mash/(?P<name>rawhide-(?P<build>\d+))/rawhide/(?P<arch>x86_64|i386)/os/images/boot\.iso')
ISO_PATH = "/var/lib/openqa/factory/iso/"
RUN_COMMAND = "/var/lib/openqa/script/client isos post ISO=%s DISTRI=fedora VERSION=rawhide FLAVOR=server ARCH=%s BUILD=%s_%s"
RUN_COMMAND = "/var/lib/openqa/script/client isos post ISO=%s DISTRI=fedora VERSION=rawhide FLAVOR=server ARCH=%s BUILD=%s"
VERSIONS = ['i386', 'x86_64']
# read last tested version from file
@ -33,27 +37,30 @@ def read_last():
result[version] = json_parsed.get(version, None)
return result, json_parsed
# read current version from Current Installation Test page
def read_currents():
page = urllib2.urlopen(CURRENT_TEST).read()
f_regex = re.compile(r'<title>.*Fedora (?P<version>\d+).*</title>')
m = f_regex.search(page)
for match in ISO_REGEX.finditer(page):
yield m.group('version'), match.group("build"), match.group(0), match.group("name"), match.group("arch")
# download rawhide iso from koji
def download_rawhide_iso(link, name, arch):
isoname = "%s_%s.iso" % (name, arch)
def download_image(image):
    """Download a given image with a name that should be unique for
    this event and arch (until we start testing different images
    for the same event and arch). Returns the filename of the image
    (not the path).

    image: a fedfind Image object (has .version, .arch and .url
    attributes). The download is skipped if the target file already
    exists under ISO_PATH.
    """
    isoname = "{0}_{1}.iso".format(image.version.replace(' ', '_'), image.arch)
    filename = os.path.join(ISO_PATH, isoname)
    if not os.path.isfile(filename):
        # Icky hack around a urlgrabber bug:
        # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=715416
        # Swap only the scheme prefix: an unanchored replace('https',
        # 'http') would also mangle any later 'https' occurrence in
        # the URL (e.g. in a query string or path component).
        urlgrabber.urlgrab(image.url.replace('https://', 'http://', 1),
                           filename)
    return isoname
# run OpenQA 'isos' job on selected isoname, with given arch and build
# returns list of job IDs
def run_openqa_jobs(isoname, arch, fedora_version, build):
command = RUN_COMMAND % (isoname, arch, fedora_version, build)
def run_openqa_jobs(isoname, arch, image_version):
"""# run OpenQA 'isos' job on selected isoname, with given arch
and a version string. **NOTE**: the version passed to OpenQA as
BUILD and is parsed back into the 'relval report-auto' arguments
by report_job_results.py; it is expected to be in the form of a
3-tuple on which join('_') has been run, and the three elements
will be passed as --release, --compose and --milestone. Returns
list of job IDs.
"""
command = RUN_COMMAND % (isoname, arch, image_version)
# starts OpenQA jobs
output = subprocess.check_output(command.split())
@ -70,28 +77,24 @@ def run_openqa_jobs(isoname, arch, fedora_version, build):
else:
return []
# run OpenQA on rawhide if there is newer version since last run
def run_if_newer():
# run OpenQA on current compose if it is newer version since last run
def run_current(args, wiki):
if not wiki:
sys.exit("python-wikitcms is required for --current. Try "
"--compose to run against today's Rawhide nightly "
"without wiki result submission.")
last_versions, json_parsed = read_last()
jobs = []
# for every architecture
for f_version, current_version, link, name, arch in read_currents():
# don't run when there is newer version
currev = wiki.current_event
print("Current event: {0}".format(currev.version))
runarches = []
for arch in VERSIONS:
last_version = last_versions.get(arch, None)
print f_version, current_version, link, name, arch,
if last_version is not None and (last_version >= current_version):
print " - Skipped"
continue
print ""
json_parsed[arch] = current_version
isoname = download_rawhide_iso(link, name, arch)
job_ids = run_openqa_jobs(isoname, arch, f_version, current_version)
jobs.extend(job_ids)
if last_version and last_version >= currev.sortname:
print("Skipped: {0}".format(arch))
else:
runarches.append(arch)
json_parsed[arch] = currev.sortname
jobs = jobs_from_fedfind(currev.ff_release, runarches)
# write info about latest versions
f = open(PERSISTENT, "w")
@ -101,18 +104,127 @@ def run_if_newer():
# wait for jobs to finish and display results
print jobs
report_results(jobs)
sys.exit()
def run_compose(args, wiki=None):
    """run OpenQA on a specified compose, optionally reporting results
    if a matching wikitcms ValidationEvent can be found.

    args: parsed argparse namespace (release / milestone / compose /
    arch / submit_results). wiki: a wikitcms Wiki instance, or None
    when python-wikitcms could not be imported.
    """
    # get the fedfind release object
    try:
        ff_release = fedfind.release.get_release(
            release=args.release, milestone=args.milestone,
            compose=args.compose)
    except ValueError as err:
        # fedfind raises ValueError for an unknown/invalid compose;
        # err[0] is the message text (Python 2 exception indexing)
        sys.exit(err[0])
    if args.submit_results and not wiki:
        # Without python-wikitcms, wiki is None and the validation
        # event lookup below would crash with AttributeError; disable
        # submission up front instead.
        print("Warning: python-wikitcms is not available, so results "
              "will not be submitted to the wiki.")
        args.submit_results = False
    if args.submit_results:
        try:
            # sanity check, there's...some voodoo in here. but this isn't
            # really strictly necessary, and we don't use the event object
            # for anything.
            event = wiki.get_validation_event(
                release=ff_release.release, milestone=ff_release.milestone,
                compose=ff_release.compose)
            evff = event.ff_release
            if evff.version != ff_release.version:
                print("Release validation event's fedfind object does not "
                      "match the one from fedfind's get_release(). Something's"
                      " wrong somewhere. Result submission disabled.")
                args.submit_results = False
        except ValueError:
            # no matching validation event exists; run jobs anyway but
            # don't try to report results
            print("Warning: could not find validation test event for this "
                  "compose. Continuing with OpenQA jobs, but results will "
                  "not be submitted to the wiki.")
            args.submit_results = False
    print("Running on compose: {0}".format(ff_release.version))
    # restrict to a single arch if one was requested
    if args.arch:
        jobs = jobs_from_fedfind(ff_release, [args.arch])
    else:
        jobs = jobs_from_fedfind(ff_release)
    print(jobs)
    if args.submit_results:
        report_results(jobs)
    sys.exit()
def jobs_from_fedfind(ff_release, arches=VERSIONS):
    """Given a fedfind.Release object, find the ISOs we want and run
    jobs on them. arches is an iterable of arches to run on; if not
    specified, we'll use our constant. Returns the list of OpenQA job
    IDs started.
    """
    # Find boot.iso images for our arches; third query is a bit of a
    # bodge till I know what 22 TCs/RCs will actually look like,
    # ideally we want a query that will reliably return one image per
    # arch without us having to filter further, but we can always just
    # take the first image for each arch if necessary
    jobs = []
    queries = (
        fedfind.release.Query('imagetype', ('boot',)),
        fedfind.release.Query('arch', arches),
        fedfind.release.Query('payload', ('server', 'generic')))
    # The BUILD string is identical for every image in this release,
    # so compute it once rather than re-joining inside the loop.
    version = '_'.join(
        (ff_release.release, ff_release.milestone, ff_release.compose))
    for image in ff_release.find_images(queries):
        print("{0} {1}".format(image.url, image.desc))
        isoname = download_image(image)
        job_ids = run_openqa_jobs(isoname, image.arch, version)
        jobs.extend(job_ids)
    return jobs
if __name__ == "__main__":
if len(sys.argv) == 1:
run_if_newer()
elif len(sys.argv) == 3:
version = sys.argv[1]
arch = sys.argv[2]
name = "rawhide-%s" % version
link = ISO_URL % (sys.argv[1], sys.argv[2])
isoname = download_rawhide_iso(link, name, arch)
job_ids = run_openqa_jobs(isoname, arch, "", version)
print job_ids
test_help = "Operate on the staging wiki (for testing)"
parser = argparse.ArgumentParser(description=(
"Run OpenQA tests for a release validation test event."))
subparsers = parser.add_subparsers()
parser_current = subparsers.add_parser(
'current', description="Run for the current event, if needed.")
parser_current.add_argument(
'-t', '--test', help=test_help, required=False, action='store_true')
parser_current.set_defaults(func=run_current)
parser_compose = subparsers.add_parser(
'compose', description="Run for a specific compose (TC/RC or nightly)."
" If a matching release validation test event can be found and "
"--submit-results is passed, results will be reported.")
parser_compose.add_argument(
'-r', '--release', type=int, required=False, choices=range(12, 100),
metavar="12-99", help="Release number of a specific compose to run "
"against. Must be passed for validation event discovery to succeed.")
parser_compose.add_argument(
'-m', '--milestone', help="The milestone to operate on (Alpha, Beta, "
"Final, Branched, Rawhide). Must be specified for a TC/RC; for a "
"nightly, will be guessed if not specified", required=False,
choices=['Alpha', 'Beta', 'Final', 'Branched', 'Rawhide'])
parser_compose.add_argument(
'-c', '--compose', help="The version to run for; either the compose "
"(for a TC/RC), or the date (for a nightly build)", required=False,
metavar="{T,R}C1-19 or YYYYMMDD")
parser_compose.add_argument(
'-a', '--arch', help="The arch to run for", required=False,
choices=('x86_64', 'i386'))
parser_compose.add_argument(
'-s', '--submit-results', help="Submit the results to the release "
"validation event for this compose, if possible", required=False,
action='store_true')
parser_compose.add_argument(
'-t', '--test', help=test_help, required=False, action='store_true')
parser_compose.set_defaults(func=run_compose)
args = parser.parse_args()
wiki = None
if args.test:
try:
wiki = wikitcms.wiki.Wiki(('https', 'stg.fedoraproject.org'),
'/w/')
except NameError:
pass
else:
print "%s [rawhide_version arch]" % sys.arv[0]
try:
wiki = wikitcms.wiki.Wiki(('https', 'fedoraproject.org'), '/w/')
except NameError:
pass
args.func(args, wiki)

View File

@ -46,12 +46,7 @@ def get_relval_commands(passed_testcases):
for key in passed_testcases:
cmd_ = relval_template
version, _, build, arch = key
if version == 'rawhide':
cmd_ += ' --release "%s" --build Rawhide --version "%s"' % tuple(build.split('_')) #"22_20150110"
elif version == 'branched':
#cmd_ += ' --release "%s" --milestone "%s" --compose "%s"' % tuple(build.split('_')) #"22_Alpha_TC1"
continue
cmd_ += ' --release "%s" --milestone "%s" --compose "%s"' % tuple(build.split('_'))
for tc_name in passed_testcases[key]:
testcase = conf_test_suites.TESTCASES[tc_name]