2017-02-20 21:12:49 +00:00
|
|
|
# Yocto Project compatibility layer tool
|
|
|
|
#
|
|
|
|
# Copyright (C) 2017 Intel Corporation
|
|
|
|
# Released under the MIT license (see COPYING.MIT)
|
|
|
|
|
|
|
|
import os
|
2017-04-05 13:36:04 +00:00
|
|
|
import subprocess
|
2017-02-20 21:12:49 +00:00
|
|
|
from enum import Enum
|
|
|
|
|
2017-04-05 13:36:05 +00:00
|
|
|
import bb.tinfoil
|
|
|
|
|
2017-02-20 21:12:49 +00:00
|
|
|
class LayerType(Enum):
    # Classification of a layer, stored in layer['type'] by _detect_layer().
    # A layer providing machine configurations (conf/machine/*.conf).
    BSP = 0
    # A layer providing distro configurations (conf/distro/*.conf).
    DISTRO = 1
    # A layer providing neither machine nor distro configurations.
    SOFTWARE = 2
    # Error markers: the layer has no conf/layer.conf at all ...
    ERROR_NO_LAYER_CONF = 98
    # ... or it provides both machine and distro configurations,
    # which is treated as invalid.
    ERROR_BSP_DISTRO = 99
|
|
|
|
|
|
|
|
def _get_configurations(path):
|
|
|
|
configs = []
|
|
|
|
|
|
|
|
for f in os.listdir(path):
|
|
|
|
file_path = os.path.join(path, f)
|
|
|
|
if os.path.isfile(file_path) and f.endswith('.conf'):
|
|
|
|
configs.append(f[:-5]) # strip .conf
|
|
|
|
return configs
|
|
|
|
|
|
|
|
def _get_layer_collections(layer_path, lconf=None, data=None):
    """
    Parse a layer's layer.conf and return its BBFILE_COLLECTIONS metadata.

    layer_path -- path of the layer directory
    lconf -- path of the layer.conf to parse; defaults to
             <layer_path>/conf/layer.conf
    data -- optional bitbake datastore to base the parse on; a fresh one
            is created when not given

    Returns a dict mapping each collection name to a dict with its
    'priority', 'pattern' and 'depends' values (each possibly None).

    Raises LayerError (defined elsewhere in this module) when parsing
    the layer.conf fails.
    """
    import bb.parse
    import bb.data

    if lconf is None:
        lconf = os.path.join(layer_path, 'conf', 'layer.conf')

    if data is None:
        # Start from an empty datastore with the parser initialized.
        ldata = bb.data.init()
        bb.parse.init_parser(ldata)
    else:
        # Work on a copy so the caller's datastore is not modified.
        ldata = data.createCopy()

    # layer.conf files reference ${LAYERDIR}, so it must be set before
    # parsing and expanded afterwards.
    ldata.setVar('LAYERDIR', layer_path)
    try:
        ldata = bb.parse.handle(lconf, ldata, include=True)
    except BaseException as exc:
        # Wrap any parse failure (including SystemExit-style errors) in
        # the module's own exception type.
        raise LayerError(exc)
    ldata.expandVarref('LAYERDIR')

    collections = (ldata.getVar('BBFILE_COLLECTIONS', True) or '').split()
    if not collections:
        # Fall back to the directory name when the layer does not
        # declare any collection.
        name = os.path.basename(layer_path)
        collections = [name]

    collections = {c: {} for c in collections}
    for name in collections:
        # Collect the per-collection metadata variables.
        priority = ldata.getVar('BBFILE_PRIORITY_%s' % name, True)
        pattern = ldata.getVar('BBFILE_PATTERN_%s' % name, True)
        depends = ldata.getVar('LAYERDEPENDS_%s' % name, True)
        collections[name]['priority'] = priority
        collections[name]['pattern'] = pattern
        collections[name]['depends'] = depends

    return collections
|
|
|
|
|
|
|
|
def _detect_layer(layer_path):
    """
    Scans layer directory to detect what type of layer
    is BSP, Distro or Software.

    Returns a dictionary with layer name, type and path.
    """
    layer = {
        'name': os.path.basename(layer_path),
        'path': layer_path,
        'conf': {},
    }

    # Without a conf/layer.conf this is not a usable layer at all.
    if not os.path.isfile(os.path.join(layer_path, 'conf', 'layer.conf')):
        layer['type'] = LayerType.ERROR_NO_LAYER_CONF
        return layer

    machine_dir = os.path.join(layer_path, 'conf', 'machine')
    distro_dir = os.path.join(layer_path, 'conf', 'distro')

    machines = _get_configurations(machine_dir) if os.path.isdir(machine_dir) else []
    distros = _get_configurations(distro_dir) if os.path.isdir(distro_dir) else []

    if machines and distros:
        # Providing both machine and distro configurations is invalid.
        layer['type'] = LayerType.ERROR_BSP_DISTRO
    elif machines:
        layer['type'] = LayerType.BSP
        layer['conf']['machines'] = machines
    elif distros:
        layer['type'] = LayerType.DISTRO
        layer['conf']['distros'] = distros
    else:
        layer['type'] = LayerType.SOFTWARE

    layer['collections'] = _get_layer_collections(layer['path'])

    return layer
|
|
|
|
|
2017-03-20 23:33:25 +00:00
|
|
|
def detect_layers(layer_directories, no_auto):
    """
    Find layers in the given directories.

    layer_directories -- directories to inspect
    no_auto -- if True, treat each directory itself as a layer (it must
               contain a conf/ directory); if False, walk each directory
               recursively and detect every subdirectory holding a conf/
               directory as a layer

    Returns a list of layer dictionaries as produced by _detect_layer().
    """
    layers = []

    for directory in layer_directories:
        directory = os.path.realpath(directory)
        # realpath() already normalizes paths, but strip a trailing
        # slash defensively so basename() yields the layer name.
        if directory[-1] == '/':
            directory = directory[0:-1]

        if no_auto:
            conf_dir = os.path.join(directory, 'conf')
            if os.path.isdir(conf_dir):
                layer = _detect_layer(directory)
                if layer:
                    layers.append(layer)
        else:
            for root, dirs, files in os.walk(directory):
                # Fixed: removed the unused local
                # dir_name = os.path.basename(root) that served no purpose.
                conf_dir = os.path.join(root, 'conf')
                if os.path.isdir(conf_dir):
                    layer = _detect_layer(root)
                    if layer:
                        layers.append(layer)

    return layers
|
|
|
|
|
2017-03-20 23:33:26 +00:00
|
|
|
def _find_layer_depends(depend, layers):
|
|
|
|
for layer in layers:
|
|
|
|
for collection in layer['collections']:
|
|
|
|
if depend == collection:
|
|
|
|
return layer
|
|
|
|
return None
|
|
|
|
|
2017-03-31 02:30:29 +00:00
|
|
|
def add_layer_dependencies(bblayersconf, layer, layers, logger):
    """
    Add the layers that *layer* depends on to a bblayers.conf file.

    bblayersconf -- path of the bblayers.conf to append to
    layer -- layer dictionary whose dependencies should be resolved
    layers -- all known layers, used to look dependency names up
    layers -- all known layer dictionaries (see _detect_layer())
    logger -- logger used for progress and error reporting

    Returns True when every (transitive) dependency was found and
    written out, False when at least one dependency is missing; missing
    dependencies are all reported via the logger before returning.
    """
    def recurse_dependencies(depends, layer, layers, logger, ret=None):
        # Fixed: the previous version used a mutable default argument
        # (ret=[]), which Python shares between calls; use the None
        # sentinel idiom instead.
        if ret is None:
            ret = []
        logger.debug('Processing dependencies %s for layer %s.' % \
                (depends, layer['name']))

        for depend in depends.split():
            # core (oe-core) is suppose to be provided
            if depend == 'core':
                continue

            layer_depend = _find_layer_depends(depend, layers)
            if not layer_depend:
                logger.error('Layer %s depends on %s and isn\'t found.' % \
                        (layer['name'], depend))
                ret = None
                continue

            # We keep processing, even if ret is None, this allows us to report
            # multiple errors at once
            if ret is not None and layer_depend not in ret:
                ret.append(layer_depend)

            # Recursively process...
            if 'collections' not in layer_depend:
                continue

            for collection in layer_depend['collections']:
                collect_deps = layer_depend['collections'][collection]['depends']
                if not collect_deps:
                    continue
                ret = recurse_dependencies(collect_deps, layer_depend, layers, logger, ret)

        return ret

    layer_depends = []
    for collection in layer['collections']:
        depends = layer['collections'][collection]['depends']
        if not depends:
            continue

        layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends)

    # Note: [] (empty) is allowed, None is not!
    if layer_depends is None:
        return False
    else:
        for layer_depend in layer_depends:
            logger.info('Adding layer dependency %s' % layer_depend['name'])
            with open(bblayersconf, 'a+') as f:
                f.write("\nBBLAYERS += \"%s\"\n" % layer_depend['path'])
    return True
|
2017-03-20 23:33:26 +00:00
|
|
|
|
2017-03-31 02:30:29 +00:00
|
|
|
def add_layer(bblayersconf, layer, layers, logger):
    """
    Append *layer* to the given bblayers.conf file.

    The *layers* argument is unused here; the parameter exists to keep
    the signature parallel with add_layer_dependencies(). Always
    returns True.
    """
    logger.info('Adding layer %s' % layer['name'])
    entry = "\nBBLAYERS += \"%s\"\n" % layer['path']
    with open(bblayersconf, 'a+') as conf_file:
        conf_file.write(entry)

    return True
|
|
|
|
|
2017-04-05 13:36:04 +00:00
|
|
|
def check_command(error_msg, cmd):
    '''
    Run a command under a shell, capture stdout and stderr in a single stream,
    throw an error when command returns non-zero exit code. Returns the output.
    '''
    proc = subprocess.Popen(cmd, shell=True,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    output = proc.communicate()[0]
    if proc.returncode:
        # Include the command and its combined output so failures are
        # debuggable from the message alone.
        raise RuntimeError("%s\nCommand: %s\nOutput:\n%s"
                           % (error_msg, cmd, output.decode('utf-8')))
    return output
|
|
|
|
|
2017-04-12 15:44:25 +00:00
|
|
|
def get_signatures(builddir, failsafe=False, machine=None):
    """
    Generate task signatures with 'bitbake -S none world' and parse the
    resulting locked-sigs.inc from *builddir*.

    builddir -- bitbake build directory containing locked-sigs.inc after
                the run
    failsafe -- if True, pass -k to bitbake and ignore build errors as
                long as a signature file was still produced
    machine -- optional MACHINE value to prefix the bitbake invocation

    Returns a tuple (sigs, tune2tasks) where sigs maps 'recipe:task' to
    its signature hash and tune2tasks maps a tune name (or None) to the
    list of tasks locked under it.

    Raises RuntimeError when signature generation fails (and failsafe
    does not apply) or when no signatures could be parsed.
    """
    import re

    # some recipes needs to be excluded like meta-world-pkgdata
    # because a layer can add recipes to a world build so signature
    # will be change
    exclude_recipes = ('meta-world-pkgdata',)

    sigs = {}
    tune2tasks = {}

    cmd = ''
    if machine:
        cmd += 'MACHINE=%s ' % machine
    cmd += 'bitbake '
    if failsafe:
        cmd += '-k '
    cmd += '-S none world'
    sigs_file = os.path.join(builddir, 'locked-sigs.inc')
    # Remove any stale signature file so a failed run cannot be mistaken
    # for a successful one.
    if os.path.exists(sigs_file):
        os.unlink(sigs_file)
    try:
        check_command('Generating signatures failed. This might be due to some parse error and/or general layer incompatibilities.',
                      cmd)
    except RuntimeError as ex:
        if failsafe and os.path.exists(sigs_file):
            # Ignore the error here. Most likely some recipes active
            # in a world build lack some dependencies. There is a
            # separate test_machine_world_build which exposes the
            # failure.
            pass
        else:
            raise

    # Fixed: use raw string literals so \s and \S are regex escapes, not
    # (invalid) string escape sequences that newer Python versions warn
    # about.
    sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
    tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
    current_tune = None
    with open(sigs_file, 'r') as f:
        for line in f.readlines():
            line = line.strip()
            # Track which tune's lock list the following signature
            # lines belong to.
            t = tune_regex.search(line)
            if t:
                current_tune = t.group('tune')
            s = sig_regex.match(line)
            if s:
                exclude = False
                for er in exclude_recipes:
                    (recipe, task) = s.group('task').split(':')
                    if er == recipe:
                        exclude = True
                        break
                if exclude:
                    continue

                sigs[s.group('task')] = s.group('hash')
                tune2tasks.setdefault(current_tune, []).append(s.group('task'))

    if not sigs:
        raise RuntimeError('Can\'t load signatures from %s' % sigs_file)

    return (sigs, tune2tasks)
|
2017-04-05 13:36:05 +00:00
|
|
|
|
|
|
|
def get_depgraph(targets=['world']):
    '''
    Returns the dependency graph for the given target(s).
    The dependency graph is taken directly from DepTreeEvent.

    Raises RuntimeError when the bitbake command cannot be started, a
    provider is missing, the command fails, or no depgraph was received.

    NOTE(review): targets=['world'] is a mutable default argument; it is
    only read here, never mutated, so it is harmless in practice.
    '''
    depgraph = None
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        # Only subscribe to the events this loop actually handles.
        tinfoil.set_event_mask(['bb.event.NoProvider', 'bb.event.DepTreeGenerated', 'bb.command.CommandCompleted'])
        if not tinfoil.run_command('generateDepTreeEvent', targets, 'do_build'):
            raise RuntimeError('starting generateDepTreeEvent failed')
        # Pump events until the command completes or fails; the depgraph
        # arrives as a DepTreeGenerated event along the way.
        while True:
            event = tinfoil.wait_event(timeout=1000)
            if event:
                if isinstance(event, bb.command.CommandFailed):
                    raise RuntimeError('Generating dependency information failed: %s' % event.error)
                elif isinstance(event, bb.command.CommandCompleted):
                    break
                elif isinstance(event, bb.event.NoProvider):
                    if event._reasons:
                        raise RuntimeError('Nothing provides %s: %s' % (event._item, event._reasons))
                    else:
                        raise RuntimeError('Nothing provides %s.' % (event._item))
                elif isinstance(event, bb.event.DepTreeGenerated):
                    depgraph = event._depgraph

    if depgraph is None:
        raise RuntimeError('Could not retrieve the depgraph.')

    return depgraph
|