oeqa.buildperf: implement BuildPerfTestRunner class
The new class is responsible for actually running the tests and processing their results. This commit also adds a decorator function for adding new tests. No automatic test discovery, at least not yet. (From OE-Core rev: bf90aecb7e150d6bfac7240286c797b79d26528b) Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com> Signed-off-by: Ross Burton <ross.burton@intel.com> Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
parent
6512d6956b
commit
1d88659ef6
|
@ -10,4 +10,5 @@
|
|||
# more details.
|
||||
#
|
||||
"""Build performance tests"""
|
||||
from .base import BuildPerfTest, KernelDropCaches
|
||||
from .base import (build_perf_test, BuildPerfTest, BuildPerfTestRunner,
|
||||
KernelDropCaches)
|
||||
|
|
|
@ -15,8 +15,10 @@ import logging
|
|||
import os
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import tempfile
|
||||
import time
|
||||
import traceback
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from oeqa.utils.commands import runCmd, get_bb_vars
|
||||
|
@ -72,6 +74,51 @@ def time_cmd(cmd, **kwargs):
|
|||
return ret, timedata
|
||||
|
||||
|
||||
class BuildPerfTestRunner(object):
    """Runner class for executing the individual tests"""
    # Global registry of test case classes to run.  Populated by the
    # perf_test_case() decorator and shared by all runner instances.
    test_run_queue = []

    def __init__(self, out_dir):
        """Initialize the runner.

        Args:
            out_dir: directory where test output is written; created
                     (including parents) if it does not exist yet
        """
        self.results = {}
        self.out_dir = os.path.abspath(out_dir)
        if not os.path.exists(self.out_dir):
            os.makedirs(self.out_dir)


    def run_tests(self):
        """Method that actually runs the tests.

        Executes every registered test class in order, storing each
        test's results under self.results['tests'].  A test that raises
        is logged with a traceback but does not abort the run.

        Returns:
            0 (always; individual test failures are recorded, not fatal)
        """
        self.results['schema_version'] = 1
        self.results['tester_host'] = socket.gethostname()
        start_time = datetime.utcnow()
        self.results['start_time'] = start_time
        self.results['tests'] = {}

        for test_class in self.test_run_queue:
            log.info("Executing test %s: %s", test_class.name,
                     test_class.description)

            test = test_class(self.out_dir)
            try:
                test.run()
            except Exception:
                # Catch all exceptions. This way e.g. buggy tests won't scrap
                # the whole test run
                sep = '-' * 5 + ' TRACEBACK ' + '-' * 60 + '\n'
                tb_msg = sep + traceback.format_exc() + sep
                # Lazy %-formatting: the message is only assembled if the
                # record is actually emitted (matches the log.info call above)
                log.error("Test execution failed with:\n%s", tb_msg)
            self.results['tests'][test.name] = test.results

        self.results['elapsed_time'] = datetime.utcnow() - start_time
        return 0
|
||||
|
||||
|
||||
def perf_test_case(obj):
    """Class decorator registering a test case with the runner.

    Appends the decorated class to BuildPerfTestRunner's global run
    queue and hands the class back unmodified.
    """
    queue = BuildPerfTestRunner.test_run_queue
    queue.append(obj)
    return obj
|
||||
|
||||
|
||||
class BuildPerfTest(object):
|
||||
"""Base class for build performance tests"""
|
||||
SYSRES = 'sysres'
|
||||
|
|
|
@ -18,11 +18,12 @@ import argparse
|
|||
import logging
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib')
|
||||
import scriptpath
|
||||
scriptpath.add_oe_lib_path()
|
||||
from oeqa.buildperf import KernelDropCaches
|
||||
from oeqa.buildperf import BuildPerfTestRunner, KernelDropCaches
|
||||
from oeqa.utils.commands import runCmd
|
||||
|
||||
|
||||
|
@ -75,7 +76,12 @@ def main(argv=None):
|
|||
# Check our capability to drop caches and ask pass if needed
|
||||
KernelDropCaches.check()
|
||||
|
||||
return 0
|
||||
# Run actual tests
|
||||
out_dir = 'results-{}'.format(datetime.now().strftime('%Y%m%d%H%M%S'))
|
||||
runner = BuildPerfTestRunner(out_dir)
|
||||
ret = runner.run_tests()
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
Loading…
Reference in New Issue