2013-09-26 11:50:50 +00:00
|
|
|
#
|
|
|
|
# BitBake ToasterUI Implementation
|
|
|
|
#
|
|
|
|
# Copyright (C) 2013 Intel Corporation
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License version 2 as
|
|
|
|
# published by the Free Software Foundation.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License along
|
|
|
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
|
|
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
|
|
|
|
|
|
import sys
|
|
|
|
import bb
|
|
|
|
import re
|
2015-08-18 16:28:58 +00:00
|
|
|
import os
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2015-12-10 03:56:30 +00:00
|
|
|
import django
|
2015-06-17 16:30:34 +00:00
|
|
|
from django.utils import timezone
|
2015-08-18 16:28:58 +00:00
|
|
|
|
2016-05-19 12:59:28 +00:00
|
|
|
import toaster
|
|
|
|
# Add toaster module to the search path to help django.setup() find the right
|
|
|
|
# modules
|
|
|
|
sys.path.insert(0, os.path.dirname(toaster.__file__))
|
2015-08-18 16:28:58 +00:00
|
|
|
|
2016-05-19 12:59:28 +00:00
|
|
|
#Set the DJANGO_SETTINGS_MODULE if it's not already set
|
|
|
|
os.environ["DJANGO_SETTINGS_MODULE"] =\
|
|
|
|
os.environ.get("DJANGO_SETTINGS_MODULE",
|
|
|
|
"toaster.toastermain.settings")
|
|
|
|
# Setup django framework (needs to be done before importing modules)
|
2015-12-10 03:56:30 +00:00
|
|
|
django.setup()
|
|
|
|
|
2015-12-10 03:56:28 +00:00
|
|
|
from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
|
|
|
|
from orm.models import Target_Image_File, BuildArtifact
|
|
|
|
from orm.models import Variable, VariableHistory
|
|
|
|
from orm.models import Package, Package_File, Target_Installed_Package, Target_File
|
|
|
|
from orm.models import Task_Dependency, Package_Dependency
|
2016-01-08 11:17:19 +00:00
|
|
|
from orm.models import Recipe_Dependency, Provides
|
2016-03-23 08:28:37 +00:00
|
|
|
from orm.models import Project, CustomImagePackage, CustomImageRecipe
|
2015-12-10 03:56:28 +00:00
|
|
|
|
2015-08-18 16:28:58 +00:00
|
|
|
from bldcontrol.models import BuildEnvironment, BuildRequest
|
2015-05-14 15:44:27 +00:00
|
|
|
|
2015-08-18 16:28:58 +00:00
|
|
|
from bb.msg import BBLogFormatter as formatter
|
2014-11-04 16:47:36 +00:00
|
|
|
from django.db import models
|
2015-03-12 12:44:05 +00:00
|
|
|
from pprint import pformat
|
2014-01-20 09:39:34 +00:00
|
|
|
import logging
|
2016-03-08 11:32:12 +00:00
|
|
|
from datetime import datetime, timedelta
|
2014-01-20 09:39:34 +00:00
|
|
|
|
2015-02-26 21:41:58 +00:00
|
|
|
from django.db import transaction, connection
|
2014-01-20 09:39:34 +00:00
|
|
|
|
2016-05-19 12:59:28 +00:00
|
|
|
|
2015-08-18 16:28:58 +00:00
|
|
|
# pylint: disable=invalid-name
|
|
|
|
# the logger name is standard throughout BitBake
|
2015-06-17 16:30:34 +00:00
|
|
|
logger = logging.getLogger("ToasterLogger")
|
2014-01-20 09:39:34 +00:00
|
|
|
|
2014-03-14 16:59:27 +00:00
|
|
|
class NotExisting(Exception):
    """Raised when a database object that was expected to already exist
    could not be found (or was unexpectedly created)."""
    pass
|
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
class ORMWrapper(object):
    """ This class creates the dictionaries needed to store information in the database
        following the format defined by the Django models. It is also used to save this
        information in the database.

        Instances additionally memoise model objects that have been looked up
        or created (see the _cached_get / _cached_get_or_create helpers) so
        that the same row is not fetched repeatedly during a single build.
    """
|
|
|
|
|
|
|
|
def __init__(self):
|
2014-11-04 16:47:36 +00:00
|
|
|
self.layer_version_objects = []
|
2015-09-29 04:45:30 +00:00
|
|
|
self.layer_version_built = []
|
2014-11-04 16:47:36 +00:00
|
|
|
self.task_objects = {}
|
|
|
|
self.recipe_objects = {}
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2014-11-04 16:47:36 +00:00
|
|
|
@staticmethod
def _build_key(**kwargs):
    """Build a deterministic cache key from keyword arguments.

    Model instances contribute their database id; any other value
    contributes its string form. Keyword names are visited in sorted
    order so the same arguments always yield the same key.
    """
    parts = ["0"]
    for name in sorted(kwargs.keys()):
        value = kwargs[name]
        if isinstance(value, models.Model):
            parts.append("-%d" % value.id)
        else:
            parts.append("-%s" % str(value))
    return "".join(parts)
|
|
|
|
|
|
|
|
|
|
|
|
def _cached_get_or_create(self, clazz, **kwargs):
    """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
        database through any other means.

        Returns a (object, created) tuple mirroring Django's get_or_create.
    """

    assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

    # cache is kept per model class, in an instance attribute named
    # "objects_<ClassName>", created lazily on first use
    key = ORMWrapper._build_key(**kwargs)
    dictname = "objects_%s" % clazz.__name__
    if not dictname in vars(self).keys():
        vars(self)[dictname] = {}

    created = False
    if not key in vars(self)[dictname].keys():
        # only hit the database on a cache miss
        vars(self)[dictname][key], created = \
            clazz.objects.get_or_create(**kwargs)

    return (vars(self)[dictname][key], created)
|
|
|
|
|
|
|
|
|
|
|
|
def _cached_get(self, clazz, **kwargs):
    """Memory-cached clazz.objects.get().

    Assumes the matching database row does not change between calls,
    so the first object fetched for a given argument set is reused.
    Raises clazz.DoesNotExist / MultipleObjectsReturned like a plain get().
    """
    assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

    cache_key = ORMWrapper._build_key(**kwargs)
    cache_name = "objects_%s" % clazz.__name__

    # one cache dict per model class, created lazily on the instance
    cache = self.__dict__.setdefault(cache_name, {})

    if cache_key not in cache:
        cache[cache_key] = clazz.objects.get(**kwargs)

    return cache[cache_key]
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2016-03-08 11:32:12 +00:00
|
|
|
def _timestamp_to_datetime(self, secs):
    """
    Convert timestamp in seconds to Python datetime

    secs is an offset from the Unix epoch (1970-01-01); the naive
    datetime is made timezone-aware with Django's timezone helper.
    """
    return timezone.make_aware(datetime(1970, 1, 1) + timedelta(seconds=secs))
|
2016-03-08 11:32:12 +00:00
|
|
|
|
2015-08-18 16:28:58 +00:00
|
|
|
# pylint: disable=no-self-use
|
|
|
|
# we disable detection of no self use in functions because the methods actually work on the object
|
|
|
|
# even if they don't touch self anywhere
|
|
|
|
|
|
|
|
# pylint: disable=bad-continuation
|
|
|
|
# we do not follow the python conventions for continuation indentation due to long lines here
|
|
|
|
|
2015-05-14 15:44:27 +00:00
|
|
|
def create_build_object(self, build_info, brbe, project_id):
    """Create (or update) the Build object for a starting build.

    build_info must supply machine, distro, distro_version, started_on,
    cooker_log_path, build_name and bitbake_version.
    brbe is a "buildrequest_id:buildenvironment_id" string when the build
    was triggered from the Toaster UI, else None; project_id identifies an
    externally-triggered project build. With neither, the build is
    attributed to the default (command line) project.

    Returns the Build model object.
    """
    assert 'machine' in build_info
    assert 'distro' in build_info
    assert 'distro_version' in build_info
    assert 'started_on' in build_info
    assert 'cooker_log_path' in build_info
    assert 'build_name' in build_info
    assert 'bitbake_version' in build_info

    prj = None
    buildrequest = None
    if brbe is not None:    # this build was triggered by a request from a user
        logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
        br, _ = brbe.split(":")
        buildrequest = BuildRequest.objects.get(pk = br)
        prj = buildrequest.project

    elif project_id is not None:    # this build was triggered by an external system for a specific project
        prj = Project.objects.get(pk = project_id)
        # BUGFIX: log after prj is fetched; previously this logged prj
        # while it was still None
        logger.debug(1, "buildinfohelper: project is %s" % prj)

    else:   # this build was triggered by a legacy system, or command line interactive mode
        prj = Project.objects.get_or_create_default_project()
        logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)

    if buildrequest is not None:
        # Toaster-triggered builds already have a Build row; update it in
        # place with the information reported by the server
        build = buildrequest.build
        logger.info("Updating existing build, with %s", build_info)
        build.project = prj
        build.machine=build_info['machine']
        build.distro=build_info['distro']
        build.distro_version=build_info['distro_version']
        build.cooker_log_path=build_info['cooker_log_path']
        build.build_name=build_info['build_name']
        build.bitbake_version=build_info['bitbake_version']
        build.save()

    else:
        build = Build.objects.create(
            project = prj,
            machine=build_info['machine'],
            distro=build_info['distro'],
            distro_version=build_info['distro_version'],
            started_on=build_info['started_on'],
            # completed_on is provisional; updated by update_build_object
            completed_on=build_info['started_on'],
            cooker_log_path=build_info['cooker_log_path'],
            build_name=build_info['build_name'],
            bitbake_version=build_info['bitbake_version'])

    logger.debug(1, "buildinfohelper: build is created %s" % build)

    # back-link the build request to the build it produced
    if buildrequest is not None:
        buildrequest.build = build
        buildrequest.save()

    return build
|
|
|
|
|
2015-10-16 17:31:12 +00:00
|
|
|
@staticmethod
def get_or_create_targets(target_info):
    """Return Target model objects for each requested build target.

    Entries in target_info['targets'] may carry a task suffix
    ("recipe:do_task"); the "do_" prefix is stripped and the default
    "build" task is recorded as empty. Newly created Target rows are
    marked as non-image and saved with their task.
    """
    targets = []
    for entry in target_info['targets']:
        task_part = ''
        if ':' in entry:
            entry, task_part = entry.split(':', 1)
        if task_part.startswith('do_'):
            task_part = task_part[3:]
        if task_part == 'build':
            # "build" is the default task, so leave it unset
            task_part = ''
        target_object, was_created = Target.objects.get_or_create(
            build=target_info['build'],
            target=entry)
        if was_created:
            target_object.is_image = False
            if task_part:
                target_object.task = task_part
            target_object.save()
        targets.append(target_object)
    return targets
|
2015-09-29 04:45:31 +00:00
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
def update_build_object(self, build, errors, warnings, taskfailures):
    """Finalise a Build: set its outcome and completion time.

    errors/warnings are counts; taskfailures is truthy when any task
    failed. A cancelled build (or one whose build request is being
    cancelled) is left untouched.
    """
    assert isinstance(build,Build)
    assert isinstance(errors, int)
    assert isinstance(warnings, int)

    # do not overwrite the outcome of a cancelled build
    if build.outcome == Build.CANCELLED:
        return
    try:
        if build.buildrequest.state == BuildRequest.REQ_CANCELLING:
            return
    except AttributeError:
        # We may not have a buildrequest if this is a command line build
        pass

    outcome = Build.SUCCEEDED
    if errors or taskfailures:
        outcome = Build.FAILED

    build.completed_on = timezone.now()
    build.outcome = outcome
    build.save()
|
|
|
|
|
2014-12-05 15:14:20 +00:00
|
|
|
def update_target_set_license_manifest(self, target, license_manifest_path):
    """Record the license manifest path on a target and persist it."""
    target.license_manifest_path = license_manifest_path
    target.save()
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2016-03-08 11:32:12 +00:00
|
|
|
def update_task_object(self, build, task_name, recipe_name, task_stats):
    """
    Find the task for build which matches the recipe and task name
    to be stored, then update it with the buildstats data in task_stats
    (timestamps, CPU time and disk I/O, when present).
    """
    task_to_update = Task.objects.get(
        build = build,
        task_name = task_name,
        recipe__name = recipe_name
    )

    # timestamps arrive as epoch seconds; store aware datetimes plus the
    # elapsed wall-clock duration
    if 'started' in task_stats and 'ended' in task_stats:
        task_to_update.started = self._timestamp_to_datetime(task_stats['started'])
        task_to_update.ended = self._timestamp_to_datetime(task_stats['ended'])
        task_to_update.elapsed_time = (task_stats['ended'] - task_stats['started'])
    # .get() leaves these as None when the stats are missing
    task_to_update.cpu_time_user = task_stats.get('cpu_time_user')
    task_to_update.cpu_time_system = task_stats.get('cpu_time_system')
    if 'disk_io_read' in task_stats and 'disk_io_write' in task_stats:
        task_to_update.disk_io_read = task_stats['disk_io_read']
        task_to_update.disk_io_write = task_stats['disk_io_write']
        task_to_update.disk_io = task_stats['disk_io_read'] + task_stats['disk_io_write']

    task_to_update.save()
|
|
|
|
|
2014-02-19 17:26:18 +00:00
|
|
|
def get_update_task_object(self, task_information, must_exist = False):
    """Fetch (or create) the Task object described by task_information
    and update it with any matching fields.

    task_information must contain 'build', 'recipe' and 'task_name'.
    When must_exist is True and the task had to be created, raises
    NotExisting. Returns the Task object.
    """
    assert 'build' in task_information
    assert 'recipe' in task_information
    assert 'task_name' in task_information

    # we use must_exist info for database look-up optimization
    task_object, created = self._cached_get_or_create(Task,
                    build=task_information['build'],
                    recipe=task_information['recipe'],
                    task_name=task_information['task_name']
                    )
    if created and must_exist:
        task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
        raise NotExisting("Task object created when expected to exist", task_information)

    # copy any matching keys from task_information onto the model,
    # saving only when something actually changed
    object_changed = False
    for v in vars(task_object):
        if v in task_information.keys():
            if vars(task_object)[v] != task_information[v]:
                vars(task_object)[v] = task_information[v]
                object_changed = True

    # update setscene-related information if the task has a setscene
    if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
        task_object.outcome = Task.OUTCOME_CACHED
        object_changed = True

        # map the setscene task's outcome onto this task's sstate result
        outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
                recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
        if outcome_task_setscene == Task.OUTCOME_SUCCESS:
            task_object.sstate_result = Task.SSTATE_RESTORED
            object_changed = True
        elif outcome_task_setscene == Task.OUTCOME_FAILED:
            task_object.sstate_result = Task.SSTATE_FAILED
            object_changed = True

    if object_changed:
        task_object.save()
    return task_object
|
|
|
|
|
|
|
|
|
2014-03-05 14:59:55 +00:00
|
|
|
def get_update_recipe_object(self, recipe_information, must_exist = False):
    """Fetch (or create) and update the Recipe object described by
    recipe_information, plus a build-history copy when a snapshot layer
    version exists for its layer.

    Returns either the 'real' recipe or the historical copy, depending
    on whether this build belongs to the default (command line) project.
    """
    assert 'layer_version' in recipe_information
    assert 'file_path' in recipe_information
    assert 'pathflags' in recipe_information

    assert not recipe_information['file_path'].startswith("/")      # we should have layer-relative paths at all times


    def update_recipe_obj(recipe_object):
        # copy matching keys from recipe_information onto the model;
        # NOTE: unlike get_update_task_object this marks the object
        # changed whenever a key matches, even if the value is identical
        object_changed = False
        for v in vars(recipe_object):
            if v in recipe_information.keys():
                object_changed = True
                vars(recipe_object)[v] = recipe_information[v]

        if object_changed:
            recipe_object.save()

    recipe, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                 file_path=recipe_information['file_path'], pathflags = recipe_information['pathflags'])

    update_recipe_obj(recipe)

    built_recipe = None
    # Create a copy of the recipe for historical purposes and update it
    for built_layer in self.layer_version_built:
        if built_layer.layer == recipe_information['layer_version'].layer:
            built_recipe, c = self._cached_get_or_create(Recipe,
                    layer_version=built_layer,
                    file_path=recipe_information['file_path'],
                    pathflags = recipe_information['pathflags'])
            update_recipe_obj(built_recipe)
            break


    # If we're in analysis mode or if this is a custom recipe
    # then we are wholly responsible for the data
    # and therefore we return the 'real' recipe rather than the build
    # history copy of the recipe.
    if recipe_information['layer_version'].build is not None and \
       recipe_information['layer_version'].build.project == \
           Project.objects.get_or_create_default_project():
        return recipe

    if built_recipe is None:
        return recipe

    return built_recipe
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2013-11-27 13:56:19 +00:00
|
|
|
def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information):
    """Return the Layer_Version for this build.

    layer_obj may already be a Layer_Version (Toaster-triggered build):
    then it is updated in place and a historical snapshot copy is made.
    Otherwise a Layer_Version is looked up / created for the given Layer
    and layer_version_information (branch, commit, priority, local_path).
    """
    if isinstance(layer_obj, Layer_Version):
        # Special case the toaster-custom-images layer which is created
        # on the fly so don't update the values which may cause the layer
        # to be duplicated on a future get_or_create
        if layer_obj.layer.name == CustomImageRecipe.LAYER_NAME:
            return layer_obj
        # We already found our layer version for this build so just
        # update it with the new build information
        logger.debug("We found our layer from toaster")
        layer_obj.local_path = layer_version_information['local_path']
        layer_obj.save()
        self.layer_version_objects.append(layer_obj)

        # create a new copy of this layer version as a snapshot for
        # historical purposes
        layer_copy, c = Layer_Version.objects.get_or_create(
            build=build_obj,
            layer=layer_obj.layer,
            up_branch=layer_obj.up_branch,
            branch=layer_version_information['branch'],
            commit=layer_version_information['commit'],
            local_path=layer_version_information['local_path'],
        )

        logger.info("created new historical layer version %d",
                    layer_copy.pk)

        # remembered so get_update_recipe_object can attach history
        # copies of recipes to this snapshot
        self.layer_version_built.append(layer_copy)

        return layer_obj

    assert isinstance(build_obj, Build)
    assert isinstance(layer_obj, Layer)
    assert 'branch' in layer_version_information
    assert 'commit' in layer_version_information
    assert 'priority' in layer_version_information
    assert 'local_path' in layer_version_information

    # If we're doing a command line build then associate this new layer with the
    # project to avoid it 'contaminating' toaster data
    project = None
    if build_obj.project == Project.objects.get_or_create_default_project():
        project = build_obj.project

    layer_version_object, _ = Layer_Version.objects.get_or_create(
                              build = build_obj,
                              layer = layer_obj,
                              branch = layer_version_information['branch'],
                              commit = layer_version_information['commit'],
                              priority = layer_version_information['priority'],
                              local_path = layer_version_information['local_path'],
                              project=project)

    self.layer_version_objects.append(layer_version_object)

    return layer_version_object
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2014-11-25 13:40:38 +00:00
|
|
|
def get_update_layer_object(self, layer_information, brbe):
    """Return the Layer (or Layer_Version) matching layer_information.

    With brbe None (non-managed build) the Layer is simply
    get_or_create'd by name and index URL. Under managed mode
    (brbe == "buildrequest_id:buildenvironment_id") the layer is matched
    against the BRLayers of the originating build request by checkout
    path. Raises NotExisting if no layer can be identified.
    """
    assert 'name' in layer_information
    assert 'layer_index_url' in layer_information

    if brbe is None:
        layer_object, _ = Layer.objects.get_or_create(
                            name=layer_information['name'],
                            layer_index_url=layer_information['layer_index_url'])
        return layer_object
    else:
        # we are under managed mode; we must match the layer used in the Project Layer
        br_id, be_id = brbe.split(":")

        # find layer by checkout path;
        from bldcontrol import bbcontroller
        bc = bbcontroller.getBuildEnvironmentController(pk = be_id)

        # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
        # but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure

        # note that this is different
        buildrequest = BuildRequest.objects.get(pk = br_id)
        for brl in buildrequest.brlayer_set.all():
            localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
            # we get a relative path, unless running in HEAD mode where the path is absolute
            if not localdirname.startswith("/"):
                localdirname = os.path.join(bc.be.sourcedir, localdirname)
            #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
            if localdirname.startswith(layer_information['local_path']):
                # If the build request came from toaster this field
                # should contain the information from the layer_version
                # That created this build request.
                if brl.layer_version:
                    return brl.layer_version

                # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
                #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))

                for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
                    if pl.layercommit.layer.vcs_url == brl.giturl :
                        layer = pl.layercommit.layer
                        layer.save()
                        return layer

        raise NotExisting("Unidentified layer %s" % pformat(layer_information))
|
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
|
2014-03-06 16:31:40 +00:00
|
|
|
def save_target_file_information(self, build_obj, target_obj, filedata):
    """Store the target filesystem listing as Target_File rows.

    filedata carries three ls-style listings: 'dirs', 'files' and 'syms'.
    Each entry is indexed as d[0]=mode string, d[1:4]=(owner, group,
    size), d[4]=path (dot-prefixed), and for symlinks d[6]=link target.
    """
    assert isinstance(build_obj, Build)
    assert isinstance(target_obj, Target)
    dirs = filedata['dirs']
    files = filedata['files']
    syms = filedata['syms']

    # always create the root directory as a special case;
    # note that this is never displayed, so the owner, group,
    # size, permission are irrelevant
    tf_obj = Target_File.objects.create(target = target_obj,
                                        path = '/',
                                        size = 0,
                                        owner = '',
                                        group = '',
                                        permission = '',
                                        inodetype = Target_File.ITYPE_DIRECTORY)
    tf_obj.save()

    # insert directories, ordered by name depth so that a parent always
    # exists before its children are inserted
    for d in sorted(dirs, key=lambda x:len(x[-1].split("/"))):
        (user, group, size) = d[1:4]
        permission = d[0][1:]   # drop the leading inode-type character
        path = d[4].lstrip(".")

        # we already created the root directory, so ignore any
        # entry for it
        if len(path) == 0:
            continue

        parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
        if len(parent_path) == 0:
            parent_path = "/"
        parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj = Target_File.objects.create(
                    target = target_obj,
                    path = path,
                    size = size,
                    inodetype = Target_File.ITYPE_DIRECTORY,
                    permission = permission,
                    owner = user,
                    group = group,
                    directory = parent_obj)

    # we insert files
    for d in files:
        (user, group, size) = d[1:4]
        permission = d[0][1:]
        path = d[4].lstrip(".")
        # NOTE(review): unlike the dirs loop, an empty parent_path is not
        # remapped to "/" here or in the symlink loop — a root-level entry
        # would make the parent lookup fail; confirm whether the listings
        # can ever contain one
        parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
        # derive the inode type from the first character of the mode string
        inodetype = Target_File.ITYPE_REGULAR
        if d[0].startswith('b'):
            inodetype = Target_File.ITYPE_BLOCK
        if d[0].startswith('c'):
            inodetype = Target_File.ITYPE_CHARACTER
        if d[0].startswith('p'):
            inodetype = Target_File.ITYPE_FIFO

        tf_obj = Target_File.objects.create(
                    target = target_obj,
                    path = path,
                    size = size,
                    inodetype = inodetype,
                    permission = permission,
                    owner = user,
                    group = group)
        parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.directory = parent_obj
        tf_obj.save()

    # we insert symlinks
    for d in syms:
        (user, group, size) = d[1:4]
        permission = d[0][1:]
        path = d[4].lstrip(".")
        filetarget_path = d[6]

        parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
        if not filetarget_path.startswith("/"):
            # we have a relative path, get a normalized absolute one
            filetarget_path = parent_path + "/" + filetarget_path
            fcp = filetarget_path.split("/")
            fcpl = []
            for i in fcp:
                if i == "..":
                    # resolve ".." components against what we have so far
                    fcpl.pop()
                else:
                    fcpl.append(i)
            filetarget_path = "/".join(fcpl)

        try:
            filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
        except Target_File.DoesNotExist:
            # we might have an invalid link; no way to detect this. just set it to None
            filetarget_obj = None

        parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)

        tf_obj = Target_File.objects.create(
                    target = target_obj,
                    path = path,
                    size = size,
                    inodetype = Target_File.ITYPE_SYMLINK,
                    permission = permission,
                    owner = user,
                    group = group,
                    directory = parent_obj,
                    sym_target = filetarget_obj)
|
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2015-12-07 18:26:47 +00:00
|
|
|
def save_target_package_information(self, build_obj, target_obj, packagedict, pkgpnmap, recipes, built_package=False):
    """Store package metadata and inter-package dependencies for a target.

    packagedict maps installed package name -> {'size': ..., 'depends': ...};
    pkgpnmap supplies the per-package metadata (PV, PR, LICENSE, ...);
    recipes maps recipe name (PN) -> Recipe object. With built_package
    True the data is stored as Package rows tied to this build; otherwise
    as CustomImagePackage rows for a custom image recipe.
    """
    assert isinstance(build_obj, Build)
    assert isinstance(target_obj, Target)

    errormsg = ""
    for p in packagedict:
        # Search name switches round the installed name vs package name
        # by default installed name == package name
        searchname = p
        if p not in pkgpnmap:
            # BUGFIX: "%p" is not a valid conversion for logging's
            # %-style lazy formatting; use %s
            logger.warning("Image packages list contains %s, but is"
                           " missing from all packages list where the"
                           " metadata comes from. Skipping...", p)
            continue

        if 'OPKGN' in pkgpnmap[p].keys():
            searchname = pkgpnmap[p]['OPKGN']

        built_recipe = recipes[pkgpnmap[p]['PN']]

        if built_package:
            packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
            recipe = built_recipe
        else:
            packagedict[p]['object'], created = \
                    CustomImagePackage.objects.get_or_create(name=searchname)
            # Clear the Package_Dependency objects as we're going to update
            # the CustomImagePackage with the latest dependency information
            packagedict[p]['object'].package_dependencies_target.all().delete()
            packagedict[p]['object'].package_dependencies_source.all().delete()
            try:
                # find the recipe in the global (build=None) layer data
                recipe = self._cached_get(
                    Recipe,
                    name=built_recipe.name,
                    layer_version__build=None,
                    layer_version__up_branch=
                    built_recipe.layer_version.up_branch,
                    file_path=built_recipe.file_path,
                    version=built_recipe.version
                )
            except (Recipe.DoesNotExist,
                    Recipe.MultipleObjectsReturned) as e:
                logger.info("We did not find one recipe for the"
                            "configuration data package %s %s" % (p, e))
                continue

        if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
            # fill in everything we can from the runtime-reverse package data
            try:
                packagedict[p]['object'].recipe = recipe
                packagedict[p]['object'].version = pkgpnmap[p]['PV']
                packagedict[p]['object'].installed_name = p
                packagedict[p]['object'].revision = pkgpnmap[p]['PR']
                packagedict[p]['object'].license = pkgpnmap[p]['LICENSE']
                packagedict[p]['object'].section = pkgpnmap[p]['SECTION']
                packagedict[p]['object'].summary = pkgpnmap[p]['SUMMARY']
                packagedict[p]['object'].description = pkgpnmap[p]['DESCRIPTION']
                packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                # no files recorded for this package, so save files info
                packagefile_objects = []
                for targetpath in pkgpnmap[p]['FILES_INFO']:
                    targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                    packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                        path = targetpath,
                        size = targetfilesize))
                if len(packagefile_objects):
                    Package_File.objects.bulk_create(packagefile_objects)
            except KeyError as e:
                errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )

        # save disk installed size
        packagedict[p]['object'].installed_size = packagedict[p]['size']
        packagedict[p]['object'].save()

        if built_package:
            Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

    packagedeps_objs = []
    for p in packagedict:
        for (px,deptype) in packagedict[p]['depends']:
            if deptype == 'depends':
                tdeptype = Package_Dependency.TYPE_TRDEPENDS
            elif deptype == 'recommends':
                tdeptype = Package_Dependency.TYPE_TRECOMMENDS
            else:
                # BUGFIX: previously an unknown deptype reused the stale
                # tdeptype from the prior iteration (or raised on the
                # first one); skip it explicitly instead
                logger.warning("Unknown dependency type %s for package %s"
                               " on %s; skipping", deptype, p, px)
                continue

            try:
                packagedeps_objs.append(Package_Dependency(
                    package = packagedict[p]['object'],
                    depends_on = packagedict[px]['object'],
                    dep_type = tdeptype,
                    target = target_obj))
            except KeyError as e:
                logger.warning("Could not add dependency to the package %s "
                               "because %s is an unknown package", p, px)

    if len(packagedeps_objs) > 0:
        Package_Dependency.objects.bulk_create(packagedeps_objs)
    else:
        logger.info("No package dependencies created")

    if len(errormsg) > 0:
        logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
|
2014-02-19 17:26:18 +00:00
|
|
|
|
2014-03-12 18:47:39 +00:00
|
|
|
def save_target_image_file_information(self, target_obj, file_name, file_size):
|
2015-08-18 16:28:58 +00:00
|
|
|
Target_Image_File.objects.create( target = target_obj,
|
2014-03-12 18:47:39 +00:00
|
|
|
file_name = file_name,
|
|
|
|
file_size = file_size)
|
2014-12-05 15:14:20 +00:00
|
|
|
|
2016-07-12 22:54:44 +00:00
|
|
|
def save_artifact_information_no_dedupe(self, build_obj, file_name, file_size):
|
|
|
|
"""
|
|
|
|
Save artifact information without checking for duplicate paths;
|
|
|
|
this is used when we are saving data about an artifact which was
|
|
|
|
generated by a previous build but which is also relevant to this build,
|
|
|
|
e.g. a bzImage file.
|
|
|
|
"""
|
|
|
|
BuildArtifact.objects.create(build=build_obj, file_name=file_name,
|
|
|
|
file_size=file_size)
|
|
|
|
|
2014-12-05 15:14:20 +00:00
|
|
|
def save_artifact_information(self, build_obj, file_name, file_size):
|
|
|
|
# we skip the image files from other builds
|
|
|
|
if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
|
|
|
|
return
|
|
|
|
|
|
|
|
# do not update artifacts found in other builds
|
|
|
|
if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
|
|
|
|
return
|
|
|
|
|
2016-07-12 22:54:44 +00:00
|
|
|
self.save_artifact_information_no_dedupe(self, build_obj, file_name,
|
|
|
|
file_size)
|
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
def create_logmessage(self, log_information):
|
2014-02-19 17:26:18 +00:00
|
|
|
assert 'build' in log_information
|
|
|
|
assert 'level' in log_information
|
|
|
|
assert 'message' in log_information
|
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
log_object = LogMessage.objects.create(
|
|
|
|
build = log_information['build'],
|
|
|
|
level = log_information['level'],
|
|
|
|
message = log_information['message'])
|
|
|
|
|
|
|
|
for v in vars(log_object):
|
|
|
|
if v in log_information.keys():
|
|
|
|
vars(log_object)[v] = log_information[v]
|
|
|
|
|
|
|
|
return log_object.save()
|
|
|
|
|
|
|
|
|
2015-12-07 18:26:47 +00:00
|
|
|
    def save_build_package_information(self, build_obj, package_info, recipes,
                                       built_package):
        """
        Create or update a package record from bitbake package metadata.

        build_obj: the Build this package data belongs to
        package_info: dict of package variables (PKG, PN, PKGV, PKGR,
            SUMMARY, DESCRIPTION, PKGSIZE, SECTION, LICENSE, FILES_INFO,
            and the R* dependency variables)
        recipes: mapping of PN -> recipe object for this build
        built_package: when True the package is stored as a built Package
            for this build; when False it is stored as a CustomImagePackage
            linked to the configuration-side (build=None) recipe

        Returns the saved package object, or None when no unique
        configuration-side recipe could be found for a custom-image package.
        """
       # assert isinstance(build_obj, Build)

        # create and save the object
        pname = package_info['PKG']
        built_recipe = recipes[package_info['PN']]
        # OPKGN, when present, overrides the package name (presumably the
        # original name before renaming -- TODO confirm with bitbake docs)
        if 'OPKGN' in package_info.keys():
            pname = package_info['OPKGN']

        if built_package:
            bp_object, _ = Package.objects.get_or_create( build = build_obj,
                                                          name = pname )
            recipe = built_recipe
        else:
            bp_object, created = \
                    CustomImagePackage.objects.get_or_create(name=pname)
            # custom-image packages must link to the build-independent
            # (layer_version__build=None) copy of the recipe
            try:
                recipe = self._cached_get(Recipe,
                                          name=built_recipe.name,
                                          layer_version__build=None,
                                          file_path=built_recipe.file_path,
                                          version=built_recipe.version)

            except (Recipe.DoesNotExist, Recipe.MultipleObjectsReturned):
                # zero or several candidate recipes: give up on this package
                logger.debug("We did not find one recipe for the configuration"
                             "data package %s" % pname)
                return

        # copy the scalar package metadata onto the object
        bp_object.installed_name = package_info['PKG']
        bp_object.recipe = recipe
        bp_object.version = package_info['PKGV']
        bp_object.revision = package_info['PKGR']
        bp_object.summary = package_info['SUMMARY']
        bp_object.description = package_info['DESCRIPTION']
        bp_object.size = int(package_info['PKGSIZE'])
        bp_object.section = package_info['SECTION']
        bp_object.license = package_info['LICENSE']
        bp_object.save()

        # save any attached file information; FILES_INFO maps
        # path -> size for every file shipped in the package
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
                                        path = path,
                                        size = package_info['FILES_INFO'][path] ))
        if len(packagefile_objects):
            Package_File.objects.bulk_create(packagefile_objects)

        def _po_byname(p):
            # resolve (or create a placeholder for) the package object a
            # dependency points at; placeholders get size -1 as a marker
            if built_package:
                pkg, created = Package.objects.get_or_create(build=build_obj,
                                                             name=p)
            else:
                pkg, created = CustomImagePackage.objects.get_or_create(name=p)

            if created:
                pkg.size = -1
                pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)

        return bp_object
|
|
|
|
|
|
|
|
    def save_build_variables(self, build_obj, vardump):
        """
        Store the variable dump of a build: help texts, values and
        per-variable history.

        build_obj: the Build the variables belong to
        vardump: mapping variable-name -> dict with keys 'doc', 'func',
            'v' (value) and 'history' (list of dicts with 'file', 'line',
            'op'), as produced by the bitbake variable dump
        """
        assert isinstance(build_obj, Build)

        for k in vardump:
            desc = vardump[k]['doc']
            if desc is None:
                # no doc for e.g. VAR_machinename: fall back to the doc of
                # the upper-case "root" variable (VAR) when one exists
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
                if root_var and root_var != k and root_var in vardump:
                    desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if len(desc):
                HelpText.objects.get_or_create(build=build_obj,
                                               area=HelpText.VARIABLE,
                                               key=k, text=desc)
            # only plain variables are stored; shell/python functions
            # ('func' flag set) are skipped
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                # record where the variable was set/appended, ignoring
                # the documentation.conf pseudo-assignments
                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if len(varhist_objects):
                    VariableHistory.objects.bulk_create(varhist_objects)
|
|
|
|
|
2015-08-18 16:28:58 +00:00
|
|
|
|
|
|
|
class MockEvent(object):
    """Synthetic stand-in for a bitbake event.

    Lets data that did not arrive via a real event be fed through the
    normal event-processing methods. All attributes start as None and
    are filled in by the caller as needed.
    """

    def __init__(self):
        # mirror the attributes the event-processing code reads
        for attr_name in ("msg", "levelno", "taskname",
                          "taskhash", "pathname", "lineno"):
            setattr(self, attr_name, None)
|
|
|
|
|
2014-07-24 17:20:25 +00:00
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
class BuildInfoHelper(object):
|
|
|
|
""" This class gathers the build information from the server and sends it
|
|
|
|
towards the ORM wrapper for storing in the database
|
|
|
|
It is instantiated once per build
|
|
|
|
Keeps in memory all data that needs matching before writing it to the database
|
|
|
|
"""
|
|
|
|
|
2015-08-18 16:28:58 +00:00
|
|
|
# pylint: disable=protected-access
|
|
|
|
# the code will look into the protected variables of the event; no easy way around this
|
|
|
|
# pylint: disable=bad-continuation
|
|
|
|
# we do not follow the python conventions for continuation indentation due to long lines here
|
2014-07-24 17:20:25 +00:00
|
|
|
|
2016-04-06 16:46:14 +00:00
|
|
|
    def __init__(self, server, has_build_history = False, brbe = None):
        """
        server: bitbake server proxy used for runCommand queries
        has_build_history: whether buildhistory data is available
        brbe: "buildrequest:buildenvironment" id string for
            Toaster-triggered builds, None for command-line builds
        """
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.internal_state['targets'] = []
        # monotonically increasing order number for non-scene tasks
        self.task_order = 0
        # counts update_and_store_task calls to pulse manual commits
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]

        # this is set for Toaster-triggered builds by localhostbecontroller
        # via toasterui
        self.brbe = brbe

        self.project = None

        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
|
2014-07-24 17:20:25 +00:00
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
###################
|
|
|
|
## methods to convert event/external info into objects that the ORM layer uses
|
|
|
|
|
|
|
|
|
2015-10-29 11:59:44 +00:00
|
|
|
def _get_build_information(self, build_log_path):
|
2013-09-26 11:50:50 +00:00
|
|
|
build_info = {}
|
|
|
|
build_info['machine'] = self.server.runCommand(["getVariable", "MACHINE"])[0]
|
|
|
|
build_info['distro'] = self.server.runCommand(["getVariable", "DISTRO"])[0]
|
|
|
|
build_info['distro_version'] = self.server.runCommand(["getVariable", "DISTRO_VERSION"])[0]
|
2015-06-17 16:30:34 +00:00
|
|
|
build_info['started_on'] = timezone.now()
|
|
|
|
build_info['completed_on'] = timezone.now()
|
2015-10-29 11:59:44 +00:00
|
|
|
build_info['cooker_log_path'] = build_log_path
|
2013-09-26 11:50:50 +00:00
|
|
|
build_info['build_name'] = self.server.runCommand(["getVariable", "BUILDNAME"])[0]
|
|
|
|
build_info['bitbake_version'] = self.server.runCommand(["getVariable", "BB_VERSION"])[0]
|
2015-12-02 18:02:42 +00:00
|
|
|
build_info['project'] = self.project = self.server.runCommand(["getVariable", "TOASTER_PROJECT"])[0]
|
2013-09-26 11:50:50 +00:00
|
|
|
return build_info
|
|
|
|
|
|
|
|
def _get_task_information(self, event, recipe):
|
2014-02-19 17:26:18 +00:00
|
|
|
assert 'taskname' in vars(event)
|
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
task_information = {}
|
|
|
|
task_information['build'] = self.internal_state['build']
|
|
|
|
task_information['outcome'] = Task.OUTCOME_NA
|
|
|
|
task_information['recipe'] = recipe
|
|
|
|
task_information['task_name'] = event.taskname
|
|
|
|
try:
|
|
|
|
# some tasks don't come with a hash. and that's ok
|
|
|
|
task_information['sstate_checksum'] = event.taskhash
|
|
|
|
except AttributeError:
|
|
|
|
pass
|
|
|
|
return task_information
|
|
|
|
|
|
|
|
    def _get_layer_version_for_path(self, path):
        """
        Map an absolute recipe/config file path to the Layer_Version it
        lives in; falls back to an "Unidentified layer" placeholder when
        no known layer path is a prefix of the given path.
        """
        assert path.startswith("/")
        assert 'build' in self.internal_state

        def _slkey_interactive(layer_version):
            # sort key: length of the layer checkout path
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.local_path)

        # Heuristics: we always match recipe to the deepest layer path in the discovered layers
        for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
            # we can match to the recipe file path
            if path.startswith(lvo.local_path):
                return lvo

        #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

        #mockup the new layer
        unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
        unknown_layer_version_obj, _ = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        # append it so we don't run into this error again and again
        self.orm_wrapper.layer_version_objects.append(unknown_layer_version_obj)

        return unknown_layer_version_obj
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2013-11-27 16:38:29 +00:00
|
|
|
def _get_recipe_information_from_taskfile(self, taskfile):
|
2014-02-19 17:26:18 +00:00
|
|
|
localfilepath = taskfile.split(":")[-1]
|
2015-05-14 15:10:50 +00:00
|
|
|
filepath_flags = ":".join(sorted(taskfile.split(":")[:-1]))
|
2014-02-19 17:26:18 +00:00
|
|
|
layer_version_obj = self._get_layer_version_for_path(localfilepath)
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2015-05-14 15:10:50 +00:00
|
|
|
|
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
recipe_info = {}
|
|
|
|
recipe_info['layer_version'] = layer_version_obj
|
2015-05-14 15:10:50 +00:00
|
|
|
recipe_info['file_path'] = localfilepath
|
|
|
|
recipe_info['pathflags'] = filepath_flags
|
|
|
|
|
|
|
|
if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
|
|
|
|
recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
|
|
|
|
else:
|
|
|
|
raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))
|
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
return recipe_info
|
|
|
|
|
|
|
|
    def _get_path_information(self, task_object):
        """
        Return the candidate buildstats directory paths for a task, one
        per target of the current build.

        The package directory name is recipe-name + "-" + [PE + "_"] + PV,
        derived from the recipe version stored as "PE:PV".
        """
        assert isinstance(task_object, Task)
        build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
        build_stats_path = []

        for t in self.internal_state['targets']:
            buildname = self.internal_state['build'].build_name
            # recipe.version is "PE:PV"; PE may be empty
            pe, pv = task_object.recipe.version.split(":",1)
            if len(pe) > 0:
                package = task_object.recipe.name + "-" + pe + "_" + pv
            else:
                package = task_object.recipe.name + "-" + pv

            build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
                                                     buildname=buildname,
                                                     package=package))

        return build_stats_path
|
|
|
|
|
|
|
|
|
|
|
|
################################
|
|
|
|
## external available methods to store information
|
2014-11-26 17:09:16 +00:00
|
|
|
@staticmethod
|
|
|
|
def _get_data_from_event(event):
|
|
|
|
evdata = None
|
|
|
|
if '_localdata' in vars(event):
|
|
|
|
evdata = event._localdata
|
|
|
|
elif 'data' in vars(event):
|
|
|
|
evdata = event.data
|
|
|
|
else:
|
|
|
|
raise Exception("Event with neither _localdata or data properties")
|
|
|
|
return evdata
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2013-11-27 13:56:19 +00:00
|
|
|
    def store_layer_info(self, event):
        """
        Cache layer-version info from a layer-info event into
        internal_state['lvs'], keyed by the (created/updated) layer
        object, for later association with the build.
        """
        layerinfos = BuildInfoHelper._get_data_from_event(event)
        self.internal_state['lvs'] = {}
        for layer in layerinfos:
            try:
                # NOTE(review): get_update_layer_object is called twice per
                # layer; presumably idempotent -- confirm before collapsing
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
            except NotExisting as nee:
                # best-effort: an unidentifiable layer is logged and skipped
                logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)
|
2013-11-27 13:56:19 +00:00
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2015-10-29 11:59:44 +00:00
|
|
|
    def store_started_build(self, event, build_log_path):
        """
        Handle the build-started event: create the Build object, attach
        the previously cached layer versions and the targets, strip path
        prefixes from the variable history, and store all variables.

        Returns self.brbe (the build request / environment id, or None).
        """
        assert '_pkgs' in vars(event)
        build_information = self._get_build_information(build_log_path)

        # Update brbe and project as they can be changed for every build
        self.project = build_information['project']

        build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe, self.project)

        self.internal_state['build'] = build_obj

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            # the cache is only valid for this one build
            del self.internal_state['lvs']

        # create target information
        target_information = {}
        target_information['targets'] = event._pkgs
        target_information['build'] = build_obj

        self.internal_state['targets'] = self.orm_wrapper.get_or_create_targets(target_information)

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]

        # convert the paths from absolute to relative to either the build directory or layer checkouts
        path_prefixes = []

        if self.brbe is not None:
            # brbe is "buildrequest_id:buildenvironment_id"
            _, be_id = self.brbe.split(":")
            be = BuildEnvironment.objects.get(pk = be_id)
            path_prefixes.append(be.builddir)

        # deepest layer paths first so the longest prefix wins
        for layer in sorted(self.orm_wrapper.layer_version_objects, key = lambda x:len(x.local_path), reverse=True):
            path_prefixes.append(layer.local_path)

        # we strip the prefixes
        for k in data:
            if not bool(data[k]['func']):
                for vh in data[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        abs_file_name = vh['file']
                        for pp in path_prefixes:
                            if abs_file_name.startswith(pp + "/"):
                                vh['file']=abs_file_name[len(pp + "/"):]
                                break

        # save the variables
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe
|
|
|
|
|
2014-12-05 15:14:20 +00:00
|
|
|
def update_artifact_image_file(self, event):
|
|
|
|
evdata = BuildInfoHelper._get_data_from_event(event)
|
|
|
|
for artifact_path in evdata.keys():
|
|
|
|
self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
|
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
def update_build_information(self, event, errors, warnings, taskfailures):
|
|
|
|
if 'build' in self.internal_state:
|
|
|
|
self.orm_wrapper.update_build_object(self.internal_state['build'], errors, warnings, taskfailures)
|
|
|
|
|
|
|
|
    def store_started_task(self, event):
        """
        Handle a task started/skipped event: create or update the Task
        record, classify skipped tasks (covered/prebuilt/noexec), assign
        an order number to non-scene tasks, and remember the outcome in
        internal_state['taskdata'] for the matching completion event.
        """
        assert isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped))
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        # key used to pair this start event with its later completion
        identifier = event.taskfile + ":" + event.taskname

        recipe_information = self._get_recipe_information_from_taskfile(event.taskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)

        task_information = self._get_task_information(event, recipe)
        task_information['outcome'] = Task.OUTCOME_NA

        if isinstance(event, bb.runqueue.runQueueTaskSkipped):
            assert 'reason' in vars(event)
            task_information['task_executed'] = False
            if event.reason == "covered":
                task_information['outcome'] = Task.OUTCOME_COVERED
            if event.reason == "existing":
                task_information['outcome'] = Task.OUTCOME_PREBUILT
        else:
            task_information['task_executed'] = True
            # noexec tasks are recorded but marked empty/not executed
            if 'noexec' in vars(event) and event.noexec == True:
                task_information['task_executed'] = False
                task_information['outcome'] = Task.OUTCOME_EMPTY
                task_information['script_type'] = Task.CODING_NA

        # do not assign order numbers to scene tasks
        if not isinstance(event, bb.runqueue.sceneQueueTaskStarted):
            self.task_order += 1
            task_information['order'] = self.task_order

        self.orm_wrapper.get_update_task_object(task_information)

        self.internal_state['taskdata'][identifier] = {
                        'outcome': task_information['outcome'],
                }
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2013-11-27 16:38:29 +00:00
|
|
|
|
|
|
|
def store_tasks_stats(self, event):
|
2016-03-08 11:32:12 +00:00
|
|
|
task_data = BuildInfoHelper._get_data_from_event(event)
|
2014-02-19 17:26:18 +00:00
|
|
|
|
2016-03-08 11:32:12 +00:00
|
|
|
for (task_file, task_name, task_stats, recipe_name) in task_data:
|
|
|
|
build = self.internal_state['build']
|
|
|
|
self.orm_wrapper.update_task_object(build, task_name, recipe_name, task_stats)
|
2013-11-27 16:38:29 +00:00
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
    def update_and_store_task(self, event):
        """
        Handle a task progress/completion event: find the taskdata entry
        created by store_started_task, merge in log file, message, script
        type and final outcome, and persist the Task record.

        Also pulses the manual-transaction autocommit every third call so
        progress becomes visible in the database.
        """
        assert 'taskfile' in vars(event)
        localfilepath = event.taskfile.split(":")[-1]
        assert localfilepath.startswith("/")

        identifier = event.taskfile + ":" + event.taskname
        if not identifier in self.internal_state['taskdata']:
            if isinstance(event, bb.build.TaskBase):
                # we do a bit of guessing
                candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)]
                if len(candidates) == 1:
                    identifier = candidates[0]

        assert identifier in self.internal_state['taskdata']
        # everything before the last ":" is the (possibly flag-prefixed) file
        identifierlist = identifier.split(":")
        realtaskfile = ":".join(identifierlist[0:len(identifierlist)-1])
        recipe_information = self._get_recipe_information_from_taskfile(realtaskfile)
        recipe = self.orm_wrapper.get_update_recipe_object(recipe_information, True)
        task_information = self._get_task_information(event,recipe)

        # start from the outcome recorded at task start
        task_information['outcome'] = self.internal_state['taskdata'][identifier]['outcome']

        if 'logfile' in vars(event):
            task_information['logfile'] = event.logfile

        if '_message' in vars(event):
            task_information['message'] = event._message

        if 'taskflags' in vars(event):
            # with TaskStarted, we get even more information
            if 'python' in event.taskflags.keys() and event.taskflags['python'] == '1':
                task_information['script_type'] = Task.CODING_PYTHON
            else:
                task_information['script_type'] = Task.CODING_SHELL

        # completion events finalise the outcome and drop the cache entry
        if task_information['outcome'] == Task.OUTCOME_NA:
            if isinstance(event, (bb.runqueue.runQueueTaskCompleted, bb.runqueue.sceneQueueTaskCompleted)):
                task_information['outcome'] = Task.OUTCOME_SUCCESS
                del self.internal_state['taskdata'][identifier]

        if isinstance(event, (bb.runqueue.runQueueTaskFailed, bb.runqueue.sceneQueueTaskFailed)):
            task_information['outcome'] = Task.OUTCOME_FAILED
            del self.internal_state['taskdata'][identifier]

        if not connection.features.autocommits_when_autocommit_is_off:
            # we force a sync point here, to get the progress bar to show
            if self.autocommit_step % 3 == 0:
                transaction.set_autocommit(True)
                transaction.set_autocommit(False)
            self.autocommit_step += 1

        self.orm_wrapper.get_update_task_object(task_information, True) # must exist
|
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
|
2014-02-17 17:33:27 +00:00
|
|
|
    def store_missed_state_tasks(self, event):
        """
        Record shared-state (sstate) cache results for the build: tasks
        whose sstate object was missed (marked SSTATE_MISS with their
        checksum) and tasks whose object was found (path recorded only).
        Synthetic MockEvents are used so the normal task-information
        helpers can be reused.
        """
        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:

            # identifier = fn + taskname + "_setscene"
            recipe_information = self._get_recipe_information_from_taskfile(fn)
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
            mevent = MockEvent()
            mevent.taskname = taskname
            mevent.taskhash = taskhash
            task_information = self._get_task_information(mevent,recipe)

            task_information['start_time'] = timezone.now()
            task_information['outcome'] = Task.OUTCOME_NA
            task_information['sstate_checksum'] = taskhash
            task_information['sstate_result'] = Task.SSTATE_MISS
            task_information['path_to_sstate_obj'] = sstatefile

            self.orm_wrapper.get_update_task_object(task_information)

        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:

            # identifier = fn + taskname + "_setscene"
            recipe_information = self._get_recipe_information_from_taskfile(fn)
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
            mevent = MockEvent()
            mevent.taskname = taskname
            mevent.taskhash = taskhash
            task_information = self._get_task_information(mevent,recipe)

            task_information['path_to_sstate_obj'] = sstatefile

            self.orm_wrapper.get_update_task_object(task_information)
|
2014-02-17 17:33:27 +00:00
|
|
|
|
|
|
|
|
2013-11-26 18:12:43 +00:00
|
|
|
    def store_target_package_data(self, event):
        """
        For every image target of this build, store per-target package
        data (once as built packages, once as custom-image packages) and
        the target file information, from the event's pkgdata/imgdata/
        filedata payload. Missing keys are logged, not fatal.
        """
        # for all image targets
        for target in self.internal_state['targets']:
            if target.is_image:
                pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
                imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'].get(target.target, {})
                filedata = BuildInfoHelper._get_data_from_event(event)['filedata'].get(target.target, {})

                try:
                    self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
                    # a copy, since the first call may mutate imgdata --
                    # TODO confirm against save_target_package_information
                    self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
                except KeyError as e:
                    logger.warning("KeyError in save_target_package_information"
                                   "%s ", e)

                try:
                    self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
                except KeyError as e:
                    logger.warning("KeyError in save_target_file_information"
                                   "%s ", e)
|
2015-10-07 13:55:08 +00:00
|
|
|
|
2014-03-06 16:31:40 +00:00
|
|
|
|
|
|
|
|
2016-05-12 14:10:36 +00:00
|
|
|
def cancel_cli_build(self):
    """
    If a build is currently underway, set its state to CANCELLED;
    note that this only gets called for command line builds which are
    interrupted, so it doesn't touch any BuildRequest objects
    """
    current_build = self.internal_state['build']
    if not current_build:
        return
    current_build.outcome = Build.CANCELLED
    current_build.save()
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
def store_dependency_information(self, event):
    """
    Persist the dependency graph carried by a DepTreeGenerated event:
    layer priorities, per-recipe metadata, recipe-level build-time
    dependencies, and the task dependency graph.
    """
    # the event must carry a complete depgraph payload
    assert '_depgraph' in vars(event)
    assert 'layer-priorities' in event._depgraph
    assert 'pn' in event._depgraph
    assert 'tdepends' in event._depgraph

    # accumulates non-fatal problems; reported once at the end
    errormsg = ""

    # save layer version priorities
    if 'layer-priorities' in event._depgraph.keys():
        for lv in event._depgraph['layer-priorities']:
            (_, path, _, priority) = lv
            layer_version_obj = self._get_layer_version_for_path(path[1:]) # paths start with a ^
            assert layer_version_obj is not None
            layer_version_obj.priority = priority
            layer_version_obj.save()

    # save recipe information
    self.internal_state['recipes'] = {}
    for pn in event._depgraph['pn']:

        # the recipe filename may be prefixed with colon-separated
        # path flags; the last component is the real file path
        file_name = event._depgraph['pn'][pn]['filename'].split(":")[-1]
        pathflags = ":".join(sorted(event._depgraph['pn'][pn]['filename'].split(":")[:-1]))
        layer_version_obj = self._get_layer_version_for_path(file_name)

        assert layer_version_obj is not None

        # optional metadata fields are only copied when present in the
        # depgraph entry
        recipe_info = {}
        recipe_info['name'] = pn
        recipe_info['layer_version'] = layer_version_obj

        if 'version' in event._depgraph['pn'][pn]:
            recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

        if 'summary' in event._depgraph['pn'][pn]:
            recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

        if 'license' in event._depgraph['pn'][pn]:
            recipe_info['license'] = event._depgraph['pn'][pn]['license']

        if 'description' in event._depgraph['pn'][pn]:
            recipe_info['description'] = event._depgraph['pn'][pn]['description']

        if 'section' in event._depgraph['pn'][pn]:
            recipe_info['section'] = event._depgraph['pn'][pn]['section']

        if 'homepage' in event._depgraph['pn'][pn]:
            recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

        if 'bugtracker' in event._depgraph['pn'][pn]:
            recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

        recipe_info['file_path'] = file_name
        recipe_info['pathflags'] = pathflags

        # store the recipe path relative to its layer checkout
        if recipe_info['file_path'].startswith(recipe_info['layer_version'].local_path):
            recipe_info['file_path'] = recipe_info['file_path'][len(recipe_info['layer_version'].local_path):].lstrip("/")
        else:
            raise RuntimeError("Recipe file path %s is not under layer version at %s" % (recipe_info['file_path'], recipe_info['layer_version'].local_path))

        recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
        recipe.is_image = False
        # a recipe inheriting image.bbclass is an image recipe
        if 'inherits' in event._depgraph['pn'][pn].keys():
            for cls in event._depgraph['pn'][pn]['inherits']:
                if cls.endswith('/image.bbclass'):
                    recipe.is_image = True
                    recipe_info['is_image'] = True
                    # Save the is_image state to the relevant recipe objects
                    self.orm_wrapper.get_update_recipe_object(recipe_info)
                    break
        # mark any build target produced by an image recipe as an image
        if recipe.is_image:
            for t in self.internal_state['targets']:
                if pn == t.target:
                    t.is_image = True
                    t.save()
        self.internal_state['recipes'][pn] = recipe

    # we'll not get recipes for key w/ values listed in ASSUME_PROVIDED

    assume_provided = self.server.runCommand(["getVariable", "ASSUME_PROVIDED"])[0].split()

    # save recipe dependency
    # buildtime
    recipedeps_objects = []
    for recipe in event._depgraph['depends']:
        target = self.internal_state['recipes'][recipe]
        for dep in event._depgraph['depends'][recipe]:
            if dep in assume_provided:
                continue
            # 'via' records the provider through which the dependency
            # is satisfied, when the providermap identifies one
            via = None
            if 'providermap' in event._depgraph and dep in event._depgraph['providermap']:
                deprecipe = event._depgraph['providermap'][dep][0]
                dependency = self.internal_state['recipes'][deprecipe]
                via = Provides.objects.get_or_create(name=dep,
                                                     recipe=dependency)[0]
            elif dep in self.internal_state['recipes']:
                dependency = self.internal_state['recipes'][dep]
            else:
                # unknown dependency: record and keep going
                errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep)
                continue
            recipe_dep = Recipe_Dependency(recipe=target,
                                           depends_on=dependency,
                                           via=via,
                                           dep_type=Recipe_Dependency.TYPE_DEPENDS)
            recipedeps_objects.append(recipe_dep)

    # single bulk insert instead of one query per dependency
    Recipe_Dependency.objects.bulk_create(recipedeps_objects)

    # save all task information
    def _save_a_task(taskdesc):
        # taskdesc is "<pn>.<taskname>"; pn itself may contain dots
        spec = re.split(r'\.', taskdesc)
        pn = ".".join(spec[0:-1])
        taskname = spec[-1]
        e = event
        e.taskname = pn
        recipe = self.internal_state['recipes'][pn]
        task_info = self._get_task_information(e, recipe)
        task_info['task_name'] = taskname
        task_obj = self.orm_wrapper.get_update_task_object(task_info)
        return task_obj

    # create tasks
    tasks = {}
    for taskdesc in event._depgraph['tdepends']:
        tasks[taskdesc] = _save_a_task(taskdesc)

    # create dependencies between tasks
    taskdeps_objects = []
    for taskdesc in event._depgraph['tdepends']:
        target = tasks[taskdesc]
        for taskdep in event._depgraph['tdepends'][taskdesc]:
            if taskdep not in tasks:
                # Fetch tasks info is not collected previously
                dep = _save_a_task(taskdep)
            else:
                dep = tasks[taskdep]
            taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
    Task_Dependency.objects.bulk_create(taskdeps_objects)

    # report (non-fatally) any dependencies we could not resolve
    if len(errormsg) > 0:
        logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
def store_build_package_information(self, event):
    """
    Save the package data carried by *event* against the current build,
    recorded twice: once as built packages and once as installable
    (not-built) packages.
    """
    package_info = BuildInfoHelper._get_data_from_event(event)
    build = self.internal_state['build']
    recipes = self.internal_state['recipes']

    for built in (True, False):
        self.orm_wrapper.save_build_package_information(
            build,
            package_info,
            recipes,
            built_package=built)
2013-09-26 11:50:50 +00:00
|
|
|
|
2014-10-13 16:10:39 +00:00
|
|
|
def _store_build_done(self, errorcode):
    """
    Record the final state of a Toaster-managed build: re-lock its
    BuildEnvironment and move its BuildRequest to a terminal state
    based on *errorcode* (0 == success).
    """
    logger.info("Build exited with errorcode %d", errorcode)
    # brbe is "<build request id>:<build environment id>"
    br_id, be_id = self.brbe.split(":")
    be = BuildEnvironment.objects.get(pk = be_id)
    be.lock = BuildEnvironment.LOCK_LOCK
    be.save()
    br = BuildRequest.objects.get(pk = br_id)

    # if we're 'done' because we got cancelled update the build outcome
    if br.state == BuildRequest.REQ_CANCELLING:
        logger.info("Build cancelled")
        br.build.outcome = Build.CANCELLED
        br.build.save()
        self.internal_state['build'] = br.build
        # a cancelled build is not a failure: force the success path
        errorcode = 0

    if errorcode == 0:
        # request archival of the project artifacts
        br.state = BuildRequest.REQ_COMPLETED
    else:
        br.state = BuildRequest.REQ_FAILED
    br.save()
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
|
|
|
|
def store_log_error(self, text):
    """Record *text* as an ERROR-level log message by wrapping it in a
    MockEvent and routing it through store_log_event()."""
    error_event = MockEvent()
    error_event.levelno = formatter.ERROR
    error_event.msg = text
    error_event.pathname = '-- None'
    error_event.lineno = LogMessage.ERROR
    self.store_log_event(error_event)
|
2013-09-26 11:50:50 +00:00
|
|
|
|
2014-11-25 10:12:46 +00:00
|
|
|
def store_log_exception(self, text, backtrace = ""):
    """Record an exception message (and optional backtrace) by wrapping
    it in a MockEvent and routing it through store_log_event()."""
    exc_event = MockEvent()
    # level/line of -1 marks this as an exception record rather than a
    # normal log line
    exc_event.levelno = -1
    exc_event.lineno = -1
    exc_event.msg = text
    exc_event.pathname = backtrace
    self.store_log_event(exc_event)
|
|
|
|
|
|
|
|
|
2013-09-26 11:50:50 +00:00
|
|
|
def store_log_event(self, event):
    """
    Save a log event as a LogMessage against the current build.

    Events below WARNING are dropped. If no build exists yet, events
    are queued in internal_state['backlog'] (command-line builds) or
    the build is looked up from the BuildRequest (Toaster-managed
    builds); once a build is available the backlog is drained via
    recursive calls to this method.
    """
    if event.levelno < formatter.WARNING:
        return

    # lazy %-formatting: expand the message only when it is saved
    if 'args' in vars(event):
        event.msg = event.msg % event.args

    if not 'build' in self.internal_state:
        if self.brbe is None:
            # command-line build with no Build object yet: queue the
            # event until the build is created
            if not 'backlog' in self.internal_state:
                self.internal_state['backlog'] = []
            self.internal_state['backlog'].append(event)
            return
        else: # we're under Toaster control, the build is already created
            br, _ = self.brbe.split(":")
            buildrequest = BuildRequest.objects.get(pk = br)
            self.internal_state['build'] = buildrequest.build

    if 'build' in self.internal_state and 'backlog' in self.internal_state:
        # if we have a backlog of events, do our best to save them here
        if len(self.internal_state['backlog']):
            # pop one queued event and recurse; the recursive call
            # drains the rest of the backlog the same way
            tempevent = self.internal_state['backlog'].pop()
            logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
            self.store_log_event(tempevent)
        else:
            logger.info("buildinfohelper: All events saved")
            del self.internal_state['backlog']

    # map the Python logging level onto the LogMessage level constants
    log_information = {}
    log_information['build'] = self.internal_state['build']
    if event.levelno == formatter.CRITICAL:
        log_information['level'] = LogMessage.CRITICAL
    elif event.levelno == formatter.ERROR:
        log_information['level'] = LogMessage.ERROR
    elif event.levelno == formatter.WARNING:
        log_information['level'] = LogMessage.WARNING
    elif event.levelno == -2: # toaster self-logging
        log_information['level'] = -2
    else:
        log_information['level'] = LogMessage.INFO

    log_information['message'] = event.msg
    log_information['pathname'] = event.pathname
    log_information['lineno'] = event.lineno
    logger.info("Logging error 2: %s", log_information)

    self.orm_wrapper.create_logmessage(log_information)
|
|
|
|
|
2016-07-12 22:54:44 +00:00
|
|
|
def _get_files_from_image_license(self, image_license_manifest_path):
|
|
|
|
"""
|
|
|
|
Find the FILES line in the image_license.manifest file,
|
|
|
|
which has the basenames of the bzImage and modules files
|
|
|
|
in this format:
|
|
|
|
FILES: bzImage--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.bin modules--4.4.11+git0+3a5f494784_53e84104c5-r0-qemux86-20160603165040.tgz
|
|
|
|
"""
|
|
|
|
files = []
|
|
|
|
with open(image_license_manifest_path) as image_license:
|
|
|
|
for line in image_license:
|
|
|
|
if line.startswith('FILES'):
|
|
|
|
files_str = line.split(':')[1].strip()
|
|
|
|
files_str = re.sub(r' {2,}', ' ', files_str)
|
|
|
|
files = files_str.split(' ')
|
|
|
|
return files
|
|
|
|
|
|
|
|
def _endswith(self, str_to_test, endings):
|
|
|
|
"""
|
|
|
|
Returns True if str ends with one of the strings in the list
|
|
|
|
endings, False otherwise
|
|
|
|
"""
|
|
|
|
endswith = False
|
|
|
|
for ending in endings:
|
|
|
|
if str_to_test.endswith(ending):
|
|
|
|
endswith = True
|
|
|
|
break
|
|
|
|
return endswith
|
|
|
|
|
|
|
|
def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions):
    """
    Find files in deploy_dir_image whose basename starts with the
    string image_name and ends with one of the strings in
    image_file_extensions.

    Returns a list of file dictionaries like

    [
        {
            'path': '/path/to/image/file',
            'size': <file size in bytes>
        }
    ]
    """
    matches = []

    for parent_dir, _, basenames in os.walk(deploy_dir_image):
        for basename in basenames:
            # keep only files matching both the name prefix and one of
            # the requested extensions
            if not basename.startswith(image_name):
                continue
            if not self._endswith(basename, image_file_extensions):
                continue

            full_path = os.path.join(parent_dir, basename)
            matches.append({
                'path': full_path,
                'size': os.stat(full_path).st_size
            })

    return matches
|
|
|
|
|
|
|
|
def scan_build_artifacts(self):
    """
    Scan for build artifacts in DEPLOY_DIR_IMAGE and associate them
    with a Target object in self.internal_state['targets'].

    We have two situations to handle:

    1. This is the first time a target + machine has been built, so
    add files from the DEPLOY_DIR_IMAGE to the target.

    OR

    2. There are no files for the target, so copy them from a
    previous build with the same target + machine.
    """
    deploy_dir_image = \
        self.server.runCommand(['getVariable', 'DEPLOY_DIR_IMAGE'])[0]

    # if there's no DEPLOY_DIR_IMAGE, there aren't going to be
    # any build artifacts, so we can return immediately
    if not deploy_dir_image:
        return

    image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]

    # location of the image_license.manifest files for this build;
    # note that this file is only produced if an image is produced
    license_directory = \
        self.server.runCommand(['getVariable', 'LICENSE_DIRECTORY'])[0]

    # file name extensions for image files; start from an empty set
    # (not a dict literal) so the value has a consistent type whether
    # or not IMAGE_FSTYPES is set
    image_file_extensions_unique = set()
    image_fstypes = self.server.runCommand(
        ['getVariable', 'IMAGE_FSTYPES'])[0]
    if image_fstypes is not None:
        image_types_str = image_fstypes.strip()
        image_file_extensions = re.sub(r' {2,}', ' ', image_types_str)
        image_file_extensions_unique = set(image_file_extensions.split(' '))

    targets = self.internal_state['targets']
    image_targets = [target for target in targets if target.is_image]
    for target in image_targets:
        # this is set to True if we find at least one file relating to
        # this target; if this remains False after the scan, we copy the
        # files from the most-recent Target with the same target + machine
        # onto this Target instead
        has_files = False

        # we construct this because by the time we reach
        # BuildCompleted, IMAGE_NAME has reset to
        # 'defaultpkgname-<MACHINE>-<BUILDNAME>';
        # we need to change it to
        # <TARGET>-<MACHINE>-<BUILDNAME>
        real_image_name = re.sub(r'^defaultpkgname', target.target,
                                 image_name)

        image_license_manifest_path = os.path.join(
            license_directory,
            real_image_name,
            'image_license.manifest')

        # if image_license.manifest exists, we can read the names of bzImage
        # and modules files for this build from it, then look for them
        # in the DEPLOY_DIR_IMAGE; note that this file is only produced
        # if an image file was produced
        if os.path.isfile(image_license_manifest_path):
            has_files = True

            basenames = self._get_files_from_image_license(
                image_license_manifest_path)

            for basename in basenames:
                artifact_path = os.path.join(deploy_dir_image, basename)
                artifact_size = os.stat(artifact_path).st_size

                self.orm_wrapper.save_artifact_information_no_dedupe(
                    self.internal_state['build'], artifact_path,
                    artifact_size)

            # store the license manifest path on the target
            # (this file is also created any time an image file is created)
            license_path = os.path.join(license_directory,
                real_image_name, 'license.manifest')

            self.orm_wrapper.update_target_set_license_manifest(target,
                license_path)

        # scan the directory for image files relating to this build
        # (via real_image_name); note that we don't have to set
        # has_files = True, as searching for the license manifest file
        # will already have set it to true if at least one image file was
        # produced
        image_files = self._get_image_files(deploy_dir_image,
            real_image_name, image_file_extensions_unique)

        for image_file in image_files:
            self.orm_wrapper.save_target_image_file_information(
                target, image_file['path'], image_file['size'])

        if not has_files:
            # TODO copy artifact and image files from the
            # most-recently-built Target with the same target + machine
            # as this Target; also copy the license manifest path,
            # as that is treated differently
            pass
|
|
|
|
|
2014-10-13 16:10:39 +00:00
|
|
|
def close(self, errorcode):
    """
    Finish the build session: persist the final build state (for
    Toaster-managed builds), deal with any backlog of unsaved log
    events, restore autocommit, and detach from the build request.
    """
    if self.brbe is not None:
        self._store_build_done(errorcode)

    if 'backlog' in self.internal_state:
        if 'build' not in self.internal_state:
            # we have no build, and we still have events; something
            # amazingly wrong happend - report the events as lost
            for pending in self.internal_state['backlog']:
                logger.error("UNSAVED log: %s", pending.msg)
        else:
            # we save missed events in the database for the current build
            self.store_log_event(self.internal_state['backlog'].pop())

    if not connection.features.autocommits_when_autocommit_is_off:
        transaction.set_autocommit(True)

    # unset the brbe; this is to prevent subsequent command-line builds
    # being incorrectly attached to the previous Toaster-triggered build;
    # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9021
    self.brbe = None
|