allow running OpenQA in docker container, add logging instead of printing to stdout

Summary:
Logging is introduced instead of output to stdout, and several
new options are added: the user can specify the directory into which
ISOs are downloaded, and the docker container in which openQA is
running. Info about the newest tested version is no longer written
if no images were found.
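
For illustration, the new options live on the top-level parser, so they
are given before the sub-command. A hypothetical invocation (the script,
container and path names here are examples, not part of this commit):

    python openqa_trigger.py --docker-container openqa_webui \
        --log-file /var/log/openqa_trigger.log --log-level info \
        --iso-directory /srv/openqa/iso all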

Differential Revision: https://phab.qadevel.cloud.fedoraproject.org/D420
Author: Garret Raziel
Date:   2015-07-09 10:31:53 +02:00
parent 093c2b5be7
commit 894e27ea84
2 changed files with 177 additions and 83 deletions

openqa_trigger.py

@@ -1,7 +1,6 @@
 #!/usr/bin/env python
 
 import json
-import urllib2
 import re
 import urlgrabber
 import os.path
@@ -9,22 +8,31 @@ import sys
 import subprocess
 import argparse
 import datetime
+import logging
 
 # We can at least find images and run OpenQA jobs without wikitcms
 try:
     import wikitcms.wiki
-except:
-    pass
+except ImportError:
+    wikitcms = None
 import fedfind.release
 
 from report_job_results import report_results
 
 PERSISTENT = "/var/tmp/openqa_watcher.json"
 ISO_PATH = "/var/lib/openqa/factory/iso/"
-RUN_COMMAND = "/var/lib/openqa/script/client isos post ISO=%s DISTRI=fedora VERSION=rawhide FLAVOR=%s ARCH=%s BUILD=%s"
-VERSIONS = ['i386', 'x86_64']
+RUN_COMMAND = "/var/lib/openqa/script/client isos post " \
+              "ISO=%s DISTRI=fedora VERSION=rawhide FLAVOR=%s ARCH=%s BUILD=%s"
+DOCKER_COMMAND = "docker exec %s " + RUN_COMMAND
+ARCHES = ['i386', 'x86_64']
+
+
+class TriggerException(Exception):
+    pass
 
 
 # read last tested version from file
 def read_last():
+    logging.debug("reading latest checked version from %s", PERSISTENT)
     result = {}
     try:
         f = open(PERSISTENT, "r")
@@ -32,12 +40,15 @@ def read_last():
         f.close()
         json_parsed = json.loads(json_raw)
     except IOError:
+        logging.warning("cannot read file %s", PERSISTENT)
         return result, {}
-    for version in VERSIONS:
-        result[version] = json_parsed.get(version, None)
+    for arch in ARCHES:
+        result[arch] = json_parsed.get(arch, None)
+        logging.info("latest version for %s: %s", arch, result[arch])
     return result, json_parsed
 
 
 def download_image(image):
     """Download a given image with a name that should be unique.
     Returns the filename of the image (not the path).
@@ -49,70 +60,94 @@ def download_image(image):
     isoname = "{0}_{1}".format(ver, image.filename)
     filename = os.path.join(ISO_PATH, isoname)
     if not os.path.isfile(filename):
-        print("Downloading {0} ({1}) to {2}...".format(
-            image.url, image.desc, filename))
+        logging.info("downloading %s (%s) to %s", image.url, image.desc, filename)
         # Icky hack around a urlgrabber bug:
         # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=715416
         urlgrabber.urlgrab(image.url.replace('https', 'http'), filename)
+    else:
+        logging.info("%s already exists", filename)
     return isoname
 
 
-def run_openqa_jobs(isoname, flavor, arch, build):
+def run_openqa_jobs(isoname, flavor, arch, build, docker_container):
     """# run OpenQA 'isos' job on selected isoname, with given arch
-    and a version string. **NOTE**: the version passed to OpenQA as
+    and a version string. If provided, use docker container docker_container
+    that includes OpenQA WebUI. **NOTE**: the version passed to OpenQA as
     BUILD and is parsed back into the 'relval report-auto' arguments
     by report_job_results.py; it is expected to be in the form of a
     3-tuple on which join('_') has been run, and the three elements
     will be passed as --release, --compose and --milestone. Returns
     list of job IDs.
     """
-    command = RUN_COMMAND % (isoname, flavor, arch, build)
+    if docker_container:
+        command = DOCKER_COMMAND % (docker_container, isoname, flavor, arch, build)
+    else:
+        command = RUN_COMMAND % (isoname, flavor, arch, build)
+    logging.info("executing: %s", command)
 
     # starts OpenQA jobs
     output = subprocess.check_output(command.split())
+    logging.debug("command executed")
 
     # read ids from OpenQA to wait for
     r = re.compile(r'ids => \[(?P<from>\d+)( \.\. (?P<to>\d+))?\]')
     match = r.search(output)
     if match and match.group('to'):
         from_i = int(match.group('from'))
         to_i = int(match.group('to')) + 1
+        logging.info("planned jobs: %d to %d", from_i, to_i - 1)
         return range(from_i, to_i)
     elif match:
-        return [int(match.group('from'))]
+        job_id = int(match.group('from'))
+        logging.info("planned job: %d", job_id)
+        return [job_id]
     else:
+        logging.info("no planned jobs")
        return []
 
 
-def jobs_from_current(wiki):
+def jobs_from_current(wiki, docker_container):
     """Schedule jobs against the 'current' release validation event
     (according to wikitcms) if we have not already. Returns a tuple,
     first value is the job list, second is the current event.
     """
     if not wiki:
-        print("python-wikitcms is required for current validation event "
-              "discovery.")
+        logging.warning("python-wikitcms is required for current validation event discovery.")
         return ([], None)
     last_versions, json_parsed = read_last()
     currev = wiki.current_event
-    print("Current event: {0}".format(currev.version))
+    logging.info("current event: %s", currev.version)
     runarches = []
-    for arch in VERSIONS:
+    for arch in ARCHES:
         last_version = last_versions.get(arch, None)
         if last_version and last_version >= currev.sortname:
-            print("Skipped: {0}".format(arch))
+            logging.info("skipped: %s: %s is newer or equal to %s",
+                         arch, last_version, currev.sortname)
         else:
             runarches.append(arch)
+            logging.debug("%s will be tested in version %s", arch, currev.sortname)
             json_parsed[arch] = currev.sortname
 
-    # write info about latest versions
-    f = open(PERSISTENT, "w")
-    f.write(json.dumps(json_parsed))
-    f.close()
-
-    jobs = jobs_from_fedfind(currev.ff_release, runarches)
+    jobs = []
+    try:
+        jobs = jobs_from_fedfind(currev.ff_release, runarches, docker_container)
+        logging.info("planned jobs: %s", jobs)
+        # write info about latest versions
+        f = open(PERSISTENT, "w")
+        f.write(json.dumps(json_parsed))
+        f.close()
+        logging.debug("written info about newest version")
+    except TriggerException as e:
+        logging.error("cannot run jobs: %s", e)
     return (jobs, currev)
 
 
-def jobs_from_fedfind(ff_release, arches=VERSIONS):
+def jobs_from_fedfind(ff_release, arches=ARCHES, docker_container=None):
     """Given a fedfind.Release object, find the ISOs we want and run
     jobs on them. arches is an iterable of arches to run on, if not
     specified, we'll use our constant.
@@ -123,11 +158,14 @@ def jobs_from_fedfind(ff_release, arches=VERSIONS):
         fedfind.release.Query('imagetype', ('boot', 'live')),
         fedfind.release.Query('arch', arches),
         fedfind.release.Query('payload', ('server', 'generic', 'workstation')))
+    logging.debug("querying fedfind for images")
     images = ff_release.find_images(queries)
+    if len(images) == 0:
+        raise TriggerException("no available images")
 
     # Now schedule jobs. First, let's get the BUILD value for openQA.
-    build = '_'.join(
-        (ff_release.release, ff_release.milestone, ff_release.compose))
+    build = '_'.join((ff_release.release, ff_release.milestone, ff_release.compose))
 
     # Next let's schedule the 'universal' tests.
     # We have different images in different composes: nightlies only
@@ -143,8 +181,7 @@ def jobs_from_fedfind(ff_release, arches=VERSIONS):
     # image.
     for arch in arches:
         okimgs = (img for img in images if img.arch == arch and
-                  any(img.imagetype == okt
-                      for okt in ('dvd', 'boot', 'netinst')))
+                  any(img.imagetype == okt for okt in ('dvd', 'boot', 'netinst')))
         bestscore = 0
         bestimg = None
         for img in okimgs:
@@ -162,67 +199,82 @@ def jobs_from_fedfind(ff_release, arches=VERSIONS):
                 bestimg = img
                 bestscore = score
         if not bestimg:
-            print("No universal tests image found for {0}!".format(arch))
+            logging.warn("no universal tests image found for %s", arch)
             continue
-        print("Running universal tests for {0} with {1}!".format(
-            arch, bestimg.desc))
+        logging.info("running universal tests for %s with %s", arch, bestimg.desc)
         isoname = download_image(bestimg)
-        job_ids = run_openqa_jobs(isoname, 'universal', arch, build)
+        job_ids = run_openqa_jobs(isoname, 'universal', arch, build, docker_container)
         jobs.extend(job_ids)
 
     # Now schedule per-image jobs.
     for image in images:
         isoname = download_image(image)
         flavor = '_'.join((image.payload, image.imagetype))
-        job_ids = run_openqa_jobs(isoname, flavor, image.arch, build)
+        job_ids = run_openqa_jobs(isoname, flavor, image.arch, build, docker_container)
         jobs.extend(job_ids)
     return jobs
 
 
-## SUB-COMMAND FUNCTIONS
+# SUB-COMMAND FUNCTIONS
 
 
 def run_current(args, wiki):
     """run OpenQA for current release validation event, if we have
     not already done it.
     """
-    jobs, _ = jobs_from_current(wiki)
+    logging.info("running on current release")
+    jobs, _ = jobs_from_current(wiki, args.docker_container)
     # wait for jobs to finish and display results
     if jobs:
-        print jobs
+        logging.info("waiting for jobs: %s", jobs)
         report_results(jobs)
+    logging.debug("finished")
     sys.exit()
 
 
 def run_compose(args, wiki=None):
     """run OpenQA on a specified compose, optionally reporting results
     if a matching wikitcms ValidationEvent is found by relval/wikitcms
     """
     # get the fedfind release object
     try:
-        ff_release = fedfind.release.get_release(
-            release=args.release, milestone=args.milestone,
-            compose=args.compose)
+        logging.debug("querying fedfind on specific compose: %s %s %s", args.release,
+                      args.milestone, args.compose)
+        ff_release = fedfind.release.get_release(release=args.release, milestone=args.milestone,
+                                                 compose=args.compose)
     except ValueError as err:
+        logging.critical("compose %s %s %s was not found", args.release, args.milestone,
+                         args.compose)
         sys.exit(err[0])
-    print("Running on compose: {0}".format(ff_release.version))
+    logging.info("running on compose: %s", ff_release.version)
 
-    if args.arch:
-        jobs = jobs_from_fedfind(ff_release, [args.arch])
-    else:
-        jobs = jobs_from_fedfind(ff_release)
-    print(jobs)
+    jobs = []
+    try:
+        if args.arch:
+            jobs = jobs_from_fedfind(ff_release, [args.arch], args.docker_container)
+        else:
+            jobs = jobs_from_fedfind(ff_release, docker_container=args.docker_container)
+    except TriggerException as e:
+        logging.error("cannot run jobs: %s", e)
+    logging.info("planned jobs: %s", jobs)
     if args.submit_results:
         report_results(jobs)
+    logging.debug("finished")
     sys.exit()
 
 
 def run_all(args, wiki=None):
     """Do everything we can: test current validation event compose if
     it's new, amd test both Rawhide and Branched nightlies if they
     exist and aren't the same as the 'current' compose.
     """
     skip = ''
+    logging.info("running all")
 
     # Run for 'current' validation event.
-    (jobs, currev) = jobs_from_current(wiki)
-    print("Jobs from current validation event: {0}".format(jobs))
+    logging.debug("running for current")
+    (jobs, currev) = jobs_from_current(wiki, args.docker_container)
+    logging.info("jobs from current validation event: %s", jobs)
 
     utcdate = datetime.datetime.utcnow()
     if args.yesterday:
@@ -231,17 +283,20 @@ def run_all(args, wiki=None):
         # Don't schedule tests for the same compose as both "today's
         # nightly" and "current validation event"
         skip = currev.milestone
+        logging.debug("skipping %s because it's both today's and current validation event", skip)
 
     # Run for day's Rawhide nightly (if not same as current event.)
     if skip.lower() != 'rawhide':
         try:
-            rawhide_ffrel = fedfind.release.get_release(
-                release='Rawhide', compose=utcdate)
-            rawjobs = jobs_from_fedfind(rawhide_ffrel)
-            print("Jobs from {0}: {1}".format(rawhide_ffrel.version, rawjobs))
+            logging.debug("running for rawhide")
+            rawhide_ffrel = fedfind.release.get_release(release='Rawhide', compose=utcdate)
+            rawjobs = jobs_from_fedfind(rawhide_ffrel, docker_container=args.docker_container)
+            logging.info("jobs from rawhide %s: %s", rawhide_ffrel.version, rawjobs)
             jobs.extend(rawjobs)
         except ValueError as err:
-            print("Rawhide image discovery failed: {0}".format(err))
+            logging.error("rawhide image discovery failed: %s", err)
+        except TriggerException as e:
+            logging.error("cannot run jobs: %s", e)
 
     # Run for day's Branched nightly (if not same as current event.)
     # We must guess a release for Branched, fedfind cannot do so. Best
@@ -249,18 +304,23 @@ def run_all(args, wiki=None):
     # compose (this is why we have jobs_from_current return currev).
     if skip.lower() != 'branched':
         try:
-            branched_ffrel = fedfind.release.get_release(
-                release=currev.release, milestone='Branched', compose=utcdate)
-            branchjobs = jobs_from_fedfind(branched_ffrel)
-            print("Jobs from {0}: {1}".format(branched_ffrel.version,
-                                              branchjobs))
+            logging.debug("running for branched")
+            branched_ffrel = fedfind.release.get_release(release=currev.release,
+                                                         milestone='Branched', compose=utcdate)
+            branchjobs = jobs_from_fedfind(branched_ffrel, docker_container=args.docker_container)
+            logging.info("jobs from %s: %s", branched_ffrel.version, branchjobs)
             jobs.extend(branchjobs)
         except ValueError as err:
-            print("Branched image discovery failed: {0}".format(err))
+            logging.error("branched image discovery failed: %s", err)
+        except TriggerException as e:
+            logging.error("cannot run jobs: %s", e)
     if jobs:
+        logging.info("waiting for jobs: %s", jobs)
         report_results(jobs)
+    logging.debug("finished")
     sys.exit()
 
 
 if __name__ == "__main__":
     test_help = "Operate on the staging wiki (for testing)"
     parser = argparse.ArgumentParser(description=(
@@ -269,8 +329,6 @@ if __name__ == "__main__":
     parser_current = subparsers.add_parser(
         'current', description="Run for the current event, if needed.")
-    parser_current.add_argument(
-        '-t', '--test', help=test_help, required=False, action='store_true')
     parser_current.set_defaults(func=run_current)
 
     parser_compose = subparsers.add_parser(
@@ -297,8 +355,6 @@ if __name__ == "__main__":
         '-s', '--submit-results', help="Submit the results to the release "
         "validation event for this compose, if possible", required=False,
         action='store_true')
-    parser_compose.add_argument(
-        '-t', '--test', help=test_help, required=False, action='store_true')
     parser_compose.set_defaults(func=run_compose)
 
     parser_all = subparsers.add_parser(
@@ -308,22 +364,48 @@ if __name__ == "__main__":
     parser_all.add_argument(
         '-y', '--yesterday', help="Run on yesterday's nightlies, not today's",
         required=False, action='store_true')
-    parser_all.add_argument(
-        '-t', '--test', help=test_help, required=False, action='store_true')
     parser_all.set_defaults(func=run_all)
 
+    parser.add_argument(
+        '-d', '--docker-container', help="If given, run tests using "
+        "specified docker container")
+    parser.add_argument(
+        '-t', '--test', help=test_help, required=False, action='store_true')
+    parser.add_argument(
+        '-f', '--log-file', help="If given, log into specified file. When not provided, stdout"
+        " is used", required=False)
+    parser.add_argument(
+        '-l', '--log-level', help="Specify log level to be outputted", required=False)
+    parser.add_argument('-i', '--iso-directory', help="Directory for downloading isos, default"
+                        " is %s" % PERSISTENT, required=False)
+
     args = parser.parse_args()
 
+    if args.log_level:
+        log_level = getattr(logging, args.log_level.upper(), None)
+        if not isinstance(log_level, int):
+            log_level = logging.WARNING
+    else:
+        log_level = logging.WARNING
+    if args.log_file:
+        logging.basicConfig(format="%(levelname)s:%(name)s:%(asctime)s:%(message)s",
+                            filename=args.log_file, level=log_level)
+    else:
+        logging.basicConfig(level=log_level)
+
+    if args.iso_directory:
+        ISO_PATH = args.iso_directory
+
     wiki = None
     if args.test:
-        try:
-            wiki = wikitcms.wiki.Wiki(('https', 'stg.fedoraproject.org'),
-                                      '/w/')
-        except NameError:
-            pass
+        logging.debug("using test wiki")
+        if wikitcms:
+            wiki = wikitcms.wiki.Wiki(('https', 'stg.fedoraproject.org'), '/w/')
+        else:
+            logging.warn("wikitcms not found, reporting to wiki disabled")
     else:
-        try:
+        if wikitcms:
             wiki = wikitcms.wiki.Wiki(('https', 'fedoraproject.org'), '/w/')
-        except NameError:
-            pass
+        else:
+            logging.warn("wikitcms not found, reporting to wiki disabled")
     args.func(args, wiki)
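
As a sanity check on the new constants, a minimal standalone sketch of
what the docker wrapping expands to (the container name, ISO name and
BUILD value are made up for the example):

    RUN_COMMAND = "/var/lib/openqa/script/client isos post " \
                  "ISO=%s DISTRI=fedora VERSION=rawhide FLAVOR=%s ARCH=%s BUILD=%s"
    DOCKER_COMMAND = "docker exec %s " + RUN_COMMAND
    # hypothetical container, iso, flavor, arch and build
    print(DOCKER_COMMAND % ("openqa_webui", "23_Fedora-Server-netinst-x86_64.iso",
                            "universal", "x86_64", "23_Branched_20150709"))
    # docker exec openqa_webui /var/lib/openqa/script/client isos post
    #     ISO=23_Fedora-Server-netinst-x86_64.iso DISTRI=fedora VERSION=rawhide
    #     FLAVOR=universal ARCH=x86_64 BUILD=23_Branched_20150709

Note that run_openqa_jobs() tokenizes the result with command.split(),
so none of the substituted values may contain whitespace.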

report_job_results.py

@@ -2,6 +2,7 @@ import requests
 import argparse
 import sys
 import time
+import logging
 
 import conf_test_suites
 
 from operator import attrgetter
@@ -9,6 +10,7 @@ from wikitcms.wiki import Wiki, ResTuple
 
 API_ROOT = "http://localhost/api/v1"
 SLEEPTIME = 60
+logger = logging.getLogger(__name__)
 
 
 def get_passed_testcases(job_ids):
@@ -17,17 +19,19 @@ def get_passed_testcases(job_ids):
     Returns ~ list of str - names of passed testcases
     """
     running_jobs = dict([(job_id, "%s/jobs/%s" % (API_ROOT, job_id)) for job_id in job_ids])
+    logger.info("running jobs: %s", running_jobs)
     finished_jobs = {}
 
     while running_jobs:
         for job_id, url in running_jobs.items():
             job_state = requests.get(url).json()['job']
             if job_state['state'] == 'done':
-                print "Job %s is done" % job_id
+                logger.info("job %s is done", job_id)
                 finished_jobs[job_id] = job_state
                 del running_jobs[job_id]
         if running_jobs:
             time.sleep(SLEEPTIME)
+    logger.info("all jobs finished")
 
     passed_testcases = set()
     for job_id in job_ids:
@@ -56,27 +60,35 @@ def get_passed_testcases(job_ids):
     return sorted(list(passed_testcases), key=attrgetter('testcase'))
 
 
-def report_results(job_ids, printcases=False, report=True):
+def report_results(job_ids, verbose=False, report=True):
     passed_testcases = get_passed_testcases(job_ids)
-    if printcases:
+    if verbose:
         for restup in passed_testcases:
-            print(restup)
+            print restup
+    logger.info("passed testcases: %s", passed_testcases)
 
     if report:
-        print "Reporting test passes:"
+        if verbose:
+            print "Reporting test passes:"
+        logger.info("reporting test passes")
         wiki = Wiki()
         wiki.login()
         if not wiki.logged_in:
+            logger.error("could not log in to wiki")
             sys.exit("Could not log in to wiki!")
         # Submit the results
         (insuffs, dupes) = wiki.report_validation_results(passed_testcases)
         for dupe in dupes:
-            tmpl = "Already reported result for test {0}, env {1}! Will not report dupe."
-            print(tmpl.format(dupe.testcase, dupe.env))
+            tmpl = "already reported result for test %s, env %s! Will not report dupe."
+            if verbose:
+                print tmpl % (dupe.testcase, dupe.env)
+            logger.info(tmpl, dupe.testcases, dupe.env)
     else:
-        print "\n\n### No reporting is done! ###\n\n"
+        if verbose:
+            print "\n\n### No reporting is done! ###\n\n"
+        logger.warning("no reporting is done")
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Evaluate per-testcase results from OpenQA job runs")
@@ -84,4 +96,4 @@ if __name__ == "__main__":
     parser.add_argument('--report', default=False, action='store_true')
     args = parser.parse_args()
-    report_results(args.jobs, printcases=True, report=args.report)
+    report_results(args.jobs, verbose=True, report=args.report)
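
For completeness, a minimal sketch of driving the reworked
report_results() directly (the job IDs are made up; report=False
exercises the new "no reporting is done" warning path):

    from report_job_results import report_results

    # poll the two hypothetical openQA jobs until they are done, print
    # the passed testcases, but skip submitting results to the wiki
    report_results([1234, 1235], verbose=True, report=False)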