oe-build-perf-test: pack all buildstats in one file

Write out all buildstats into one big json file, instead of using
multiple per-measurement files. Individual buildstats will be indexed
using "<test_name>.<measurement_name>" as the key. Also, change the
per-testcase working directories into temporary directories that are
removed after test execution, as there are no longer any per-testcase
data files to store permanently.

[YOCTO #10582]

(From OE-Core rev: a7f2e8915db379021f3409ca640de5d3b054a830)

Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Markus Lehtonen 2017-03-24 16:17:25 +02:00 committed by Richard Purdie
parent 867c2dcbd7
commit 7132f54041
3 changed files with 36 additions and 31 deletions

View File

@ -16,6 +16,7 @@ import os
import re
import resource
import socket
import shutil
import time
import unittest
import xml.etree.ElementTree as ET
@ -127,7 +128,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
def startTest(self, test):
"""Pre-test hook"""
test.base_dir = self.out_dir
os.mkdir(test.out_dir)
log.info("Executing test %s: %s", test.name, test.shortDescription())
self.stream.write(datetime.now().strftime("[%Y-%m-%d %H:%M:%S] "))
super(BuildPerfTestResult, self).startTest(test)
@ -150,6 +150,16 @@ class BuildPerfTestResult(unittest.TextTestResult):
return sorted(compound, key=lambda info: info[1].start_time)
def write_buildstats_json(self):
    """Dump the buildstats of all executed tests into one json file.

    Each buildstat is indexed by "<test_name>.<measurement_name>".
    """
    combined = OrderedDict()
    for _, test, _ in self.all_results():
        for measurement, bstats in test.buildstats.items():
            combined['{}.{}'.format(test.name, measurement)] = bstats
    out_path = os.path.join(self.out_dir, 'buildstats.json')
    with open(out_path, 'w') as fobj:
        json.dump(combined, fobj, cls=ResultsJsonEncoder)
def write_results_json(self):
"""Write test results into a json-formatted file"""
results = OrderedDict([('tester_host', self.hostname),
@ -221,8 +231,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
ET.SubElement(measurement, 'time',
timestamp=vals['start_time'].isoformat()).text = \
str(vals['elapsed_time'].total_seconds())
if 'buildstats_file' in vals:
ET.SubElement(measurement, 'buildstats_file').text = vals['buildstats_file']
attrib = dict((k, str(v)) for k, v in vals['iostat'].items())
ET.SubElement(measurement, 'iostat', attrib=attrib)
attrib = dict((k, str(v)) for k, v in vals['rusage'].items())
@ -238,7 +246,6 @@ class BuildPerfTestResult(unittest.TextTestResult):
dom_doc = minidom.parseString(ET.tostring(top, 'utf-8'))
with open(os.path.join(self.out_dir, 'results.xml'), 'w') as fobj:
dom_doc.writexml(fobj, addindent=' ', newl='\n', encoding='utf-8')
return
class BuildPerfTestCase(unittest.TestCase):
@ -254,6 +261,7 @@ class BuildPerfTestCase(unittest.TestCase):
self.start_time = None
self.elapsed_time = None
self.measurements = OrderedDict()
self.buildstats = OrderedDict()
# self.err is supposed to be a tuple from sys.exc_info()
self.err = None
self.bb_vars = get_bb_vars()
@ -263,17 +271,24 @@ class BuildPerfTestCase(unittest.TestCase):
self.sizes = []
@property
def out_dir(self):
return os.path.join(self.base_dir, self.name)
def tmp_dir(self):
return os.path.join(self.base_dir, self.name + '.tmp')
def shortDescription(self):
    """Return the test description, or an empty string if none is set."""
    desc = super(BuildPerfTestCase, self).shortDescription()
    return desc if desc else ""
def setUp(self):
    """Fixture set-up, run before each test"""
    # Make sure the per-test temporary working directory exists
    if not os.path.isdir(self.tmp_dir):
        os.mkdir(self.tmp_dir)
    # Pre-fetch sources of the build target, if one is defined
    if self.build_target:
        fetch_cmd = ['bitbake', self.build_target, '-c', 'fetchall']
        self.run_cmd(fetch_cmd)
def tearDown(self):
    """Fixture tear-down, run after each test"""
    # Wipe the per-test temporary directory; no data files are persisted there
    tmp = self.tmp_dir
    if os.path.isdir(tmp):
        shutil.rmtree(tmp)
def run(self, *args, **kwargs):
"""Run test"""
self.start_time = datetime.now()
@ -349,9 +364,7 @@ class BuildPerfTestCase(unittest.TestCase):
('rusage', data['rusage']),
('iostat', data['iostat'])])
if save_bs:
bs_file = self.save_buildstats(legend)
measurement['values']['buildstats_file'] = \
os.path.relpath(bs_file, self.base_dir)
self.save_buildstats(name)
self._append_measurement(measurement)
@ -379,7 +392,7 @@ class BuildPerfTestCase(unittest.TestCase):
# Append to 'sizes' array for globalres log
self.sizes.append(str(size))
def save_buildstats(self, label=None):
def save_buildstats(self, measurement_name):
"""Save buildstats"""
def split_nevr(nevr):
"""Split name and version information from recipe "nevr" string"""
@ -451,14 +464,7 @@ class BuildPerfTestCase(unittest.TestCase):
task))
buildstats.append(recipe_bs)
# Write buildstats into json file
postfix = '.' + str_to_fn(label) if label else ''
postfix += '.json'
outfile = os.path.join(self.out_dir, 'buildstats' + postfix)
with open(outfile, 'w') as fobj:
json.dump(buildstats, fobj, indent=4, sort_keys=True,
cls=ResultsJsonEncoder)
return outfile
self.buildstats[measurement_name] = buildstats
def rm_tmp(self):
"""Cleanup temporary/intermediate files and directories"""

View File

@ -51,21 +51,19 @@ class Test1P3(BuildPerfTestCase):
def test13(self):
"""Build core-image-sato with rm_work enabled"""
postfile = os.path.join(self.out_dir, 'postfile.conf')
postfile = os.path.join(self.tmp_dir, 'postfile.conf')
with open(postfile, 'w') as fobj:
fobj.write('INHERIT += "rm_work"\n')
try:
self.rm_tmp()
self.rm_sstate()
self.rm_cache()
self.sync()
cmd = ['bitbake', '-R', postfile, self.build_target]
self.measure_cmd_resources(cmd, 'build',
'bitbake' + self.build_target,
save_bs=True)
self.measure_disk_usage(self.bb_vars['TMPDIR'], 'tmpdir', 'tmpdir')
finally:
os.unlink(postfile)
self.rm_tmp()
self.rm_sstate()
self.rm_cache()
self.sync()
cmd = ['bitbake', '-R', postfile, self.build_target]
self.measure_cmd_resources(cmd, 'build',
'bitbake' + self.build_target,
save_bs=True)
self.measure_disk_usage(self.bb_vars['TMPDIR'], 'tmpdir', 'tmpdir')
class Test2(BuildPerfTestCase):

View File

@ -209,6 +209,7 @@ def main(argv=None):
result.write_results_xml()
else:
result.write_results_json()
result.write_buildstats_json()
if args.globalres_file:
update_globalres_file(result, args.globalres_file, metadata)
if result.wasSuccessful():