mirror of https://gitee.com/openkylin/linux.git
tc-testing: Implement the TdcResults module in tdc
In tdc and the valgrind plugin, begin using the TdcResults module to track executed test cases.

Signed-off-by: Lucas Bates <lucasb@mojatatu.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
This commit is contained in:
parent dfe465d33e
commit 915c158dea
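
The patch below replaces tdc's hand-rolled TAP string plumbing with the TdcResults API (TestResult, ResultState, TestSuiteReport): run_one_test() now returns a TestResult, test_runner() collects them in a TestSuiteReport, and set_operation_mode() renders the report in the format chosen on the command line. As a minimal sketch of that pattern (not the patch itself), the following assumes the TdcResults module introduced earlier in this series is importable, i.e. it is run from tools/testing/selftests/tc-testing/, and it uses made-up test ids and names:

from TdcResults import TestResult, TestSuiteReport, ResultState

tsr = TestSuiteReport()

# One TestResult per executed test case, keyed by test id and name.
res = TestResult('a5e9', 'Add basic filter')          # hypothetical id/name
res.set_result(ResultState.success)
tsr.add_resultdata(res)

# Failures and skips carry an explanation, as run_one_test() now records.
res = TestResult('b2c4', 'Replace filter')            # hypothetical id/name
res.set_result(ResultState.fail)
res.set_failmsg('Could not match regex pattern. Verify command output:\n...')
tsr.add_resultdata(res)

# set_operation_mode() renders the whole suite in the requested format.
print(tsr.format_tap())       # TAP text, the default
#print(tsr.format_xunit())    # xUnit XML, written to --outfile when given

On the command line this maps to the new options added in set_args(): for example, ./tdc.py --format xunit --outfile results.xml should write an xUnit report, while --format none suppresses result output and replaces the old -N/--notap switch.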
--- a/tools/testing/selftests/tc-testing/TdcPlugin.py
+++ b/tools/testing/selftests/tc-testing/TdcPlugin.py
@@ -18,11 +18,12 @@ class TdcPlugin:
         if self.args.verbose > 1:
             print(' -- {}.post_suite'.format(self.sub_class))
 
-    def pre_case(self, test_ordinal, testid):
+    def pre_case(self, test_ordinal, testid, test_name):
         '''run commands before test_runner does one test'''
         if self.args.verbose > 1:
             print(' -- {}.pre_case'.format(self.sub_class))
         self.args.testid = testid
+        self.args.test_name = test_name
         self.args.test_ordinal = test_ordinal
 
     def post_case(self):
--- a/tools/testing/selftests/tc-testing/plugin-lib/valgrindPlugin.py
+++ b/tools/testing/selftests/tc-testing/plugin-lib/valgrindPlugin.py
@@ -11,6 +11,7 @@ from string import Template
 import subprocess
 import time
 from TdcPlugin import TdcPlugin
+from TdcResults import *
 
 from tdc_config import *
 
@@ -21,6 +22,7 @@ class SubPlugin(TdcPlugin):
     def __init__(self):
         self.sub_class = 'valgrind/SubPlugin'
         self.tap = ''
+        self._tsr = TestSuiteReport()
         super().__init__()
 
     def pre_suite(self, testcount, testidlist):
@@ -34,10 +36,14 @@ class SubPlugin(TdcPlugin):
     def post_suite(self, index):
         '''run commands after test_runner goes into a test loop'''
         super().post_suite(index)
-        self._add_to_tap('\n|---\n')
         if self.args.verbose > 1:
             print('{}.post_suite'.format(self.sub_class))
-        print('{}'.format(self.tap))
+        #print('{}'.format(self.tap))
+        for xx in range(index - 1, self.testcount):
+            res = TestResult('{}-mem'.format(self.testidlist[xx]), 'Test skipped')
+            res.set_result(ResultState.skip)
+            res.set_errormsg('Skipped because of prior setup/teardown failure')
+            self._add_results(res)
         if self.args.verbose < 4:
             subprocess.check_output('rm -f vgnd-*.log', shell=True)
 
@@ -128,8 +134,17 @@ class SubPlugin(TdcPlugin):
         nle_num = int(nle_mo.group(1))
 
         mem_results = ''
+        res = TestResult('{}-mem'.format(self.args.testid),
+                         '{} memory leak check'.format(self.args.test_name))
         if (def_num > 0) or (ind_num > 0) or (pos_num > 0) or (nle_num > 0):
             mem_results += 'not '
+            res.set_result(ResultState.fail)
+            res.set_failmsg('Memory leak detected')
+            res.append_failmsg(content)
+        else:
+            res.set_result(ResultState.success)
+
+        self._add_results(res)
 
         mem_results += 'ok {} - {}-mem # {}\n'.format(
             self.args.test_ordinal, self.args.testid, 'memory leak check')
@@ -138,5 +153,8 @@ class SubPlugin(TdcPlugin):
             print('{}'.format(content))
         self._add_to_tap(content)
 
+    def _add_results(self, res):
+        self._tsr.add_resultdata(res)
+
     def _add_to_tap(self, more_tap_output):
         self.tap += more_tap_output
--- a/tools/testing/selftests/tc-testing/tdc.py
+++ b/tools/testing/selftests/tc-testing/tdc.py
@@ -23,6 +23,7 @@ from tdc_config import *
 from tdc_helper import *
 
 import TdcPlugin
+from TdcResults import *
 
 
 class PluginMgrTestFail(Exception):
@@ -60,10 +61,10 @@ class PluginMgr:
         for pgn_inst in reversed(self.plugin_instances):
             pgn_inst.post_suite(index)
 
-    def call_pre_case(self, test_ordinal, testid):
+    def call_pre_case(self, test_ordinal, testid, test_name):
         for pgn_inst in self.plugin_instances:
             try:
-                pgn_inst.pre_case(test_ordinal, testid)
+                pgn_inst.pre_case(test_ordinal, testid, test_name)
             except Exception as ee:
                 print('exception {} in call to pre_case for {} plugin'.
                       format(ee, pgn_inst.__class__))
@@ -102,7 +103,6 @@ class PluginMgr:
         self.argparser = argparse.ArgumentParser(
             description='Linux TC unit tests')
 
-
 def replace_keywords(cmd):
     """
     For a given executable command, substitute any known
@@ -187,6 +187,7 @@ def run_one_test(pm, args, index, tidx):
     result = True
     tresult = ""
     tap = ""
+    res = TestResult(tidx['id'], tidx['name'])
     if args.verbose > 0:
         print("\t====================\n=====> ", end="")
     print("Test " + tidx["id"] + ": " + tidx["name"])
@@ -194,7 +195,7 @@ def run_one_test(pm, args, index, tidx):
     # populate NAMES with TESTID for this test
     NAMES['TESTID'] = tidx['id']
 
-    pm.call_pre_case(index, tidx['id'])
+    pm.call_pre_case(index, tidx['id'], tidx['name'])
     prepare_env(args, pm, 'setup', "-----> prepare stage", tidx["setup"])
 
     if (args.verbose > 0):
@@ -209,10 +210,11 @@ def run_one_test(pm, args, index, tidx):
     pm.call_post_execute()
 
     if (exit_code is None or exit_code != int(tidx["expExitCode"])):
-        result = False
         print("exit: {!r}".format(exit_code))
         print("exit: {}".format(int(tidx["expExitCode"])))
         #print("exit: {!r} {}".format(exit_code, int(tidx["expExitCode"])))
+        res.set_result(ResultState.fail)
+        res.set_failmsg('Command exited with {}, expected {}\n{}'.format(exit_code, tidx["expExitCode"], procout))
         print(procout)
     else:
         if args.verbose > 0:
@@ -223,20 +225,15 @@
         if procout:
             match_index = re.findall(match_pattern, procout)
             if len(match_index) != int(tidx["matchCount"]):
-                result = False
+                res.set_result(ResultState.fail)
+                res.set_failmsg('Could not match regex pattern. Verify command output:\n{}'.format(procout))
+            else:
+                res.set_result(ResultState.success)
         elif int(tidx["matchCount"]) != 0:
-            result = False
-
-    if not result:
-        tresult += 'not '
-    tresult += 'ok {} - {} # {}\n'.format(str(index), tidx['id'], tidx['name'])
-    tap += tresult
-
-    if result == False:
-        if procout:
-            tap += procout
+            res.set_result(ResultState.fail)
+            res.set_failmsg('No output generated by verify command.')
         else:
-            tap += 'No output!\n'
+            res.set_result(ResultState.success)
 
     prepare_env(args, pm, 'teardown', '-----> teardown stage', tidx['teardown'], procout)
     pm.call_post_case()
@@ -245,7 +242,7 @@ def run_one_test(pm, args, index, tidx):
 
     # remove TESTID from NAMES
     del(NAMES['TESTID'])
-    return tap
+    return res
 
 def test_runner(pm, args, filtered_tests):
     """
@@ -265,25 +262,15 @@ def test_runner(pm, args, filtered_tests):
     emergency_exit = False
     emergency_exit_message = ''
 
-    if args.notap:
-        if args.verbose:
-            tap = 'notap requested: omitting test plan\n'
-    else:
-        tap = str(index) + ".." + str(tcount) + "\n"
+    tsr = TestSuiteReport()
+
     try:
         pm.call_pre_suite(tcount, [tidx['id'] for tidx in testlist])
     except Exception as ee:
         ex_type, ex, ex_tb = sys.exc_info()
         print('Exception {} {} (caught in pre_suite).'.
               format(ex_type, ex))
-        # when the extra print statements are uncommented,
-        # the traceback does not appear between them
-        # (it appears way earlier in the tdc.py output)
-        # so don't bother ...
-        # print('--------------------(')
-        # print('traceback')
         traceback.print_tb(ex_tb)
-        # print('--------------------)')
         emergency_exit_message = 'EMERGENCY EXIT, call_pre_suite failed with exception {} {}\n'.format(ex_type, ex)
         emergency_exit = True
         stage = 'pre-SUITE'
@@ -299,15 +286,26 @@ def test_runner(pm, args, filtered_tests):
             if args.verbose > 1:
                 print('Not executing test {} {} because DEV2 not defined'.
                       format(tidx['id'], tidx['name']))
+            res = TestResult(tidx['id'], tidx['name'])
+            res.set_result(ResultState.skip)
+            res.set_errormsg('Not executed because DEV2 is not defined')
+            tsr.add_resultdata(res)
             continue
         try:
             badtest = tidx # in case it goes bad
-            tap += run_one_test(pm, args, index, tidx)
+            res = run_one_test(pm, args, index, tidx)
+            tsr.add_resultdata(res)
         except PluginMgrTestFail as pmtf:
             ex_type, ex, ex_tb = sys.exc_info()
             stage = pmtf.stage
             message = pmtf.message
             output = pmtf.output
+            res = TestResult(tidx['id'], tidx['name'])
+            res.set_result(ResultState.skip)
+            res.set_errormsg(pmtf.message)
+            res.set_failmsg(pmtf.output)
+            tsr.add_resultdata(res)
+            index += 1
             print(message)
             print('Exception {} {} (caught in test_runner, running test {} {} {} stage {})'.
                   format(ex_type, ex, index, tidx['id'], tidx['name'], stage))
@@ -326,16 +324,16 @@ def test_runner(pm, args, filtered_tests):
     # if we failed in setup or teardown,
     # fill in the remaining tests with ok-skipped
     count = index
-    if not args.notap:
-        tap += 'about to flush the tap output if tests need to be skipped\n'
-        if tcount + 1 != index:
-            for tidx in testlist[index - 1:]:
-                msg = 'skipped - previous {} failed'.format(stage)
-                tap += 'ok {} - {} # {} {} {}\n'.format(
-                    count, tidx['id'], msg, index, badtest.get('id', '--Unknown--'))
-                count += 1
 
-        tap += 'done flushing skipped test tap output\n'
+    if tcount + 1 != count:
+        for tidx in testlist[count - 1:]:
+            res = TestResult(tidx['id'], tidx['name'])
+            res.set_result(ResultState.skip)
+            msg = 'skipped - previous {} failed {} {}'.format(stage,
+                index, badtest.get('id', '--Unknown--'))
+            res.set_errormsg(msg)
+            tsr.add_resultdata(res)
+            count += 1
 
     if args.pause:
         print('Want to pause\nPress enter to continue ...')
@@ -344,7 +342,7 @@ def test_runner(pm, args, filtered_tests):
 
     pm.call_post_suite(index)
 
-    return tap
+    return tsr
 
 def has_blank_ids(idlist):
     """
@@ -384,6 +382,10 @@ def set_args(parser):
     """
     Set the command line arguments for tdc.
     """
+    parser.add_argument(
+        '--outfile', type=str,
+        help='Path to the file in which results should be saved. ' +
+        'Default target is the current directory.')
     parser.add_argument(
         '-p', '--path', type=str,
         help='The full path to the tc executable to use')
@@ -420,8 +422,9 @@ def set_args(parser):
         '-v', '--verbose', action='count', default=0,
         help='Show the commands that are being run')
     parser.add_argument(
-        '-N', '--notap', action='store_true',
-        help='Suppress tap results for command under test')
+        '--format', default='tap', const='tap', nargs='?',
+        choices=['none', 'xunit', 'tap'],
+        help='Specify the format for test results. (Default: TAP)')
     parser.add_argument('-d', '--device',
                         help='Execute the test case in flower category')
     parser.add_argument(
@@ -638,12 +641,30 @@ def set_operation_mode(pm, args):
 
     if len(alltests):
         catresults = test_runner(pm, args, alltests)
+        if args.format == 'none':
+            print('Test results output suppression requested\n')
+        else:
+            print('\nAll test results: \n')
+            if args.format == 'xunit':
+                suffix = 'xml'
+                res = catresults.format_xunit()
+            elif args.format == 'tap':
+                suffix = 'tap'
+                res = catresults.format_tap()
+            print(res)
+            print('\n\n')
+            if not args.outfile:
+                fname = 'test-results.{}'.format(suffix)
+            else:
+                fname = args.outfile
+            with open(fname, 'w') as fh:
+                fh.write(res)
+                fh.close()
+                if os.getenv('SUDO_UID') is not None:
+                    os.chown(fname, uid=int(os.getenv('SUDO_UID')),
+                             gid=int(os.getenv('SUDO_GID')))
     else:
-        catresults = 'No tests found\n'
-    if args.notap:
-        print('Tap output suppression requested\n')
-    else:
-        print('All test results: \n\n{}'.format(catresults))
+        print('No tests found\n')
 
 def main():
     """