mirror of https://pagure.io/fedora-qa/createhdds.git synced 2024-09-28 07:37:23 +00:00

reformatted to PEP8

Auto PEP8 2015-07-14 08:10:38 +02:00 committed by Garret Raziel
parent 4b77522b03
commit 12a58e84a6
2 changed files with 152 additions and 150 deletions
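
The diff below is a mechanical autopep8 cleanup rather than a functional change. As a rough before/after sketch of the main fix it applies (the dictionary is trimmed from the real TESTSUITES data further down; the PEP8 codes named are the usual ones for these issues, not quoted from the commit):

# Before: no whitespace after the key's colon (normally flagged as E231),
# plus some over-long lines (E501).
TESTSUITES = {
    "default_install":[
        "QA:Testcase_Boot_default_install",
    ],
}

# After autopep8: a space after the colon; long calls and strings in the
# second file below are likewise re-wrapped to respect the line-length limit.
TESTSUITES = {
    "default_install": [
        "QA:Testcase_Boot_default_install",
    ],
}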

View File

@@ -129,45 +129,45 @@ TESTCASES = {
"env": "x86 BIOS",
"type": "Installation",
},
# "": {
# "name_cb": callbackfunc # optional, called with 'flavor'
# "section": "",
# "env": "x86",
# "type": "Installation",
# },
}
# "": {
# "name_cb": callbackfunc # optional, called with 'flavor'
# "section": "",
# "env": "x86",
# "type": "Installation",
# },
}
TESTSUITES = {
"default_install":[
"default_install": [
"QA:Testcase_Boot_default_install",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
"QA:Testcase_Anaconda_user_creation",
],
"package_set_minimal":[
"package_set_minimal": [
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_Anaconda_User_Interface_Graphical",
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_delete_pata":[
"server_delete_pata": [
"QA:Testcase_install_to_PATA",
"QA:Testcase_partitioning_guided_delete_all",
"QA:Testcase_Anaconda_User_Interface_Graphical",
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_sata_multi":[
"server_sata_multi": [
"QA:Testcase_install_to_SATA",
"QA:Testcase_partitioning_guided_multi_select",
"QA:Testcase_Anaconda_User_Interface_Graphical",
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_scsi_updates_img":[
"server_scsi_updates_img": [
"QA:Testcase_install_to_SCSI",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_updates.img_via_URL",
@@ -175,13 +175,13 @@ TESTSUITES = {
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_kickstart_user_creation":[
"server_kickstart_user_creation": [
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_kickstart_user_creation",
"QA:Testcase_Kickstart_Http_Server_Ks_Cfg",
],
"server_mirrorlist_graphical":[
"server_mirrorlist_graphical": [
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@@ -189,7 +189,7 @@ TESTSUITES = {
"QA:Testcase_install_repository_Mirrorlist_graphical",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_repository_http_graphical":[
"server_repository_http_graphical": [
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@@ -197,7 +197,7 @@ TESTSUITES = {
"QA:Testcase_install_repository_HTTP/FTP_graphical",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_repository_http_variation":[
"server_repository_http_variation": [
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@@ -205,7 +205,7 @@ TESTSUITES = {
"QA:Testcase_install_repository_HTTP/FTP_variation",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_mirrorlist_http_variation":[
"server_mirrorlist_http_variation": [
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_partitioning_guided_empty",
"QA:Testcase_Anaconda_User_Interface_Graphical",
@@ -249,14 +249,13 @@ TESTSUITES = {
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_Package_Sets_Minimal_Package_Install",
],
"server_kickstart_hdd":[
"server_kickstart_hdd": [
"QA:Testcase_install_to_VirtIO",
"QA:Testcase_Anaconda_user_creation",
"QA:Testcase_kickstart_user_creation",
"QA:Testcase_Kickstart_Hd_Device_Path_Ks_Cfg",
],
"fedup_minimal":[
"fedup_minimal": [
"QA:Testcase_upgrade_fedup_cli_previous_minimal",
],
}
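
The commented-out template near the top of TESTCASES above documents an optional name_cb callback and an env field; the reporting code in the second file calls name_cb with the job's FLAVOR and substitutes the job architecture wherever env is '$RUNARCH$'. A purely hypothetical entry in that shape (the testcase name, section value and callback below are invented for illustration, not part of the real dict):

# Hypothetical illustration of the template above, not a real TESTCASES entry.
def _name_from_flavor(flavor):
    # e.g. derive the Wikitcms test-instance name from the openQA FLAVOR setting
    return flavor.replace('_', ' ')

TESTCASES_EXAMPLE = {
    "QA:Testcase_Some_Example": {
        "name_cb": _name_from_flavor,  # optional; called with the job's FLAVOR
        "section": "Example section",  # invented value
        "env": "$RUNARCH$",            # replaced by the job's arch in get_passed_testcases()
        "type": "Installation",
    },
}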

View File

@@ -43,23 +43,25 @@ def get_passed_testcases(job_ids, client):
flavor = job['settings']['FLAVOR']
for testcase in conf_test_suites.TESTSUITES[testsuite]:
# each 'testsuite' is a list using testcase names to indicate which Wikitcms tests have
# passed if this job passes. Each testcase name is the name of a dict in the TESTCASES
# dict-of-dicts which more precisely identifies the 'test instance' (when there is more
# than one for a testcase) and environment for which the result should be filed.
# each 'testsuite' is a list using testcase names to indicate which Wikitcms tests
# have passed if this job passes. Each testcase name is the name of a dict in the
# TESTCASES dict-of-dicts which more precisely identifies the 'test instance' (when
# there is more than one for a testcase) and environment for which the result
# should be filed.
uniqueres = conf_test_suites.TESTCASES[testcase]
testname = ''
if 'name_cb' in uniqueres:
testname = uniqueres['name_cb'](flavor)
env = arch if uniqueres['env'] == '$RUNARCH$' else uniqueres['env']
result = ResTuple(
testtype=uniqueres['type'], release=release, milestone=milestone, compose=compose,
testcase=testcase, section=uniqueres['section'], testname=testname, env=env, status='pass',
bot=True)
testtype=uniqueres['type'], release=release, milestone=milestone,
compose=compose, testcase=testcase, section=uniqueres['section'],
testname=testname, env=env, status='pass', bot=True)
passed_testcases.add(result)
return sorted(list(passed_testcases), key=attrgetter('testcase'))
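
The comment above describes a two-level lookup: the job's testsuite name selects a list of testcase names from TESTSUITES, and each testcase name keys into TESTCASES for the metadata used to file the result. A toy trace of that lookup, using cut-down stand-ins for the real dicts (the pairing of values here is illustrative only):

# Cut-down stand-ins for conf_test_suites.TESTSUITES / TESTCASES (illustrative values).
TESTSUITES = {"default_install": ["QA:Testcase_Boot_default_install"]}
TESTCASES = {
    "QA:Testcase_Boot_default_install": {
        "section": "Example section",  # invented
        "env": "x86 BIOS",
        "type": "Installation",
    },
}

testsuite = "default_install"  # derived from the openQA job data earlier in the real function
for testcase in TESTSUITES[testsuite]:
    uniqueres = TESTCASES[testcase]
    # these fields feed the ResTuple that is filed as a 'pass' result above
    print(testcase, uniqueres["type"], uniqueres["env"])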
def report_results(job_ids, client, verbose=False, report=True):
passed_testcases = get_passed_testcases(job_ids, client)
if verbose:
@@ -91,7 +93,8 @@ def report_results(job_ids, client, verbose=False, report=True):
logger.warning("no reporting is done")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Evaluate per-testcase results from OpenQA job runs")
parser = argparse.ArgumentParser(description="Evaluate per-testcase results from OpenQA job "
"runs")
parser.add_argument('jobs', type=int, nargs='+')
parser.add_argument('--report', default=False, action='store_true')
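
For context, the two arguments visible in the argparse setup above are a positional list of integer job IDs and a --report flag. A minimal sketch of how that parser behaves, rebuilt from just the lines shown here (the job IDs are made up):

import argparse

parser = argparse.ArgumentParser(description="Evaluate per-testcase results from OpenQA job "
                                             "runs")
parser.add_argument('jobs', type=int, nargs='+')
parser.add_argument('--report', default=False, action='store_true')

args = parser.parse_args(['1234', '1235', '--report'])
print(args.jobs)    # [1234, 1235]
print(args.report)  # True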