2013-10-11 12:46:23 +00:00
|
|
|
#
|
2014-01-14 12:50:32 +00:00
|
|
|
# ex:ts=4:sw=4:sts=4:et
|
|
|
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
|
|
|
#
|
2013-10-11 12:46:23 +00:00
|
|
|
# BitBake Toaster Implementation
|
|
|
|
#
|
|
|
|
# Copyright (C) 2013 Intel Corporation
|
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License version 2 as
|
|
|
|
# published by the Free Software Foundation.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License along
|
|
|
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
|
|
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
|
|
|
2016-03-08 18:09:02 +00:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2016-07-27 12:15:02 +00:00
|
|
|
from django.db import models, IntegrityError, DataError
|
2016-07-21 13:43:25 +00:00
|
|
|
from django.db.models import F, Q, Sum, Count
|
2014-08-08 14:03:03 +00:00
|
|
|
from django.utils import timezone
|
2016-03-08 18:09:02 +00:00
|
|
|
from django.utils.encoding import force_bytes
|
2014-08-08 14:03:03 +00:00
|
|
|
|
2015-08-04 19:46:29 +00:00
|
|
|
from django.core.urlresolvers import reverse
|
2014-10-09 11:37:30 +00:00
|
|
|
|
|
|
|
from django.core import validators
|
2014-12-05 15:14:20 +00:00
|
|
|
from django.conf import settings
|
2015-05-19 15:10:19 +00:00
|
|
|
import django.db.models.signals
|
2014-10-09 11:37:30 +00:00
|
|
|
|
2016-05-09 23:01:51 +00:00
|
|
|
import sys
|
2016-09-16 16:22:29 +00:00
|
|
|
import os
|
2016-01-15 11:00:46 +00:00
|
|
|
import re
|
2016-01-18 14:23:55 +00:00
|
|
|
import itertools
|
2016-09-16 16:22:29 +00:00
|
|
|
from signal import SIGUSR1
|
2016-01-15 11:00:46 +00:00
|
|
|
|
2015-08-18 16:28:52 +00:00
|
|
|
import logging
|
|
|
|
logger = logging.getLogger("toaster")
|
|
|
|
|
2015-12-10 03:56:40 +00:00
|
|
|
# SQLite allows only one writer at a time, so concurrent writes (e.g. from
# the web UI and the build event collector) can fail with a
# "database is locked" OperationalError. When running on SQLite, monkey-patch
# Django's ORM to retry/serialize writes and work around this.
if 'sqlite' in settings.DATABASES['default']['ENGINE']:
    from django.db import transaction, OperationalError
    from time import sleep

    # keep a reference to the unpatched save() so the wrapper can delegate
    _base_save = models.Model.save
    def save(self, *args, **kwargs):
        # Retry the save for as long as the database is locked by another
        # writer, sleeping briefly between attempts; any other
        # OperationalError is re-raised immediately.
        while True:
            try:
                with transaction.atomic():
                    return _base_save(self, *args, **kwargs)
            except OperationalError as err:
                if 'database is locked' in str(err):
                    logger.warning("%s, model: %s, args: %s, kwargs: %s",
                                   err, self.__class__, args, kwargs)
                    sleep(0.5)
                    continue
                raise

    models.Model.save = save

    # HACK: Monkey patch Django to fix 'database is locked' issue

    from django.db.models.query import QuerySet
    # wrap bulk inserts in an atomic block (no savepoint) on the queryset's
    # own database connection, keeping a reference to the unpatched _insert
    _base_insert = QuerySet._insert
    def _insert(self, *args, **kwargs):
        with transaction.atomic(using=self.db, savepoint=False):
            return _base_insert(self, *args, **kwargs)
    QuerySet._insert = _insert

    from django.utils import six
    def _create_object_from_params(self, lookup, params):
        """
        Tries to create an object using passed params.
        Used by get_or_create and update_or_create
        """
        try:
            obj = self.create(**params)
            return obj, True
        except (IntegrityError, DataError):
            # another writer may have created the row first; try to fetch
            # it, and re-raise the original error if it still isn't there
            exc_info = sys.exc_info()
            try:
                return self.get(**lookup), False
            except self.model.DoesNotExist:
                pass
            six.reraise(*exc_info)

    QuerySet._create_object_from_params = _create_object_from_params

    # end of HACK
|
|
|
|
|
2014-10-09 11:37:30 +00:00
|
|
|
class GitURLValidator(validators.URLValidator):
    """URL validator which, in addition to http(s)/ftp(s), accepts the
    ssh:// and git:// schemes commonly used for git remotes.

    The pattern mirrors Django's URLValidator with the extra schemes
    prepended. Uses the module-level 're' import (the original had a
    redundant 'import re' inside the class body).
    """
    regex = re.compile(
        r'^(?:ssh|git|http|ftp)s?://'  # ssh:// git:// http:// ftp:// (+s)
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4
        r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)
|
|
|
|
|
|
|
|
def GitURLField(**kwargs):
    """Build a models.URLField whose URLValidator instances are replaced
    with GitURLValidator, so git:// and ssh:// URLs validate.

    Accepts the same keyword arguments as models.URLField and returns the
    configured field instance.
    """
    field = models.URLField(**kwargs)
    # swap every stock URLValidator for the git-aware one in place, so any
    # other validators attached to the field are preserved
    for i, validator in enumerate(field.validators):
        if isinstance(validator, validators.URLValidator):
            field.validators[i] = GitURLValidator()
    return field
|
|
|
|
|
|
|
|
|
2014-08-08 14:03:03 +00:00
|
|
|
class ToasterSetting(models.Model):
    # Generic name/value store for Toaster-wide settings. Settings whose
    # name starts with "DEFCONF_" seed the default project variables of
    # every newly created project (see ProjectManager.create_project).
    name = models.CharField(max_length=63)
    helptext = models.TextField()
    value = models.CharField(max_length=255)

    def __unicode__(self):
        return "Setting %s = %s" % (self.name, self.value)
|
2014-10-09 11:37:30 +00:00
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
|
2014-06-30 14:58:36 +00:00
|
|
|
class ProjectManager(models.Manager):
    def create_project(self, name, release):
        """Create and save a new Project.

        If release is not None, the project is bound to that release (and
        its bitbake version) and the release's default layers are added to
        it; otherwise a release-less project is created. In both cases the
        project is seeded with the DEFCONF_* ToasterSetting defaults.
        Returns the saved Project.
        """
        if release is not None:
            prj = self.model(name=name,
                             bitbake_version=release.bitbake_version,
                             release=release)
        else:
            prj = self.model(name=name,
                             bitbake_version=None,
                             release=None)

        prj.save()

        # seed project variables from the DEFCONF_<NAME> Toaster settings;
        # use a distinct loop variable so the 'name' parameter isn't shadowed
        for defaultconf in ToasterSetting.objects.filter(
                name__startswith="DEFCONF_"):
            var_name = defaultconf.name[8:]
            ProjectVariable.objects.create(project=prj,
                                           name=var_name,
                                           value=defaultconf.value)

        if release is None:
            return prj

        # add the release's default layers to the project
        for rdl in release.releasedefaultlayer_set.all():
            lv = Layer_Version.objects.filter(
                layer__name=rdl.layer_name,
                release=release).first()

            if lv:
                ProjectLayer.objects.create(project=prj,
                                            layercommit=lv,
                                            optional=False)
            else:
                # lazy %-args: message is only formatted if actually logged
                logger.warning("Default project layer %s not found",
                               rdl.layer_name)

        return prj

    # return single object with is_default = True
    def get_or_create_default_project(self):
        """Return the default project (the container for builds started
        from the command line), creating it if necessary.

        Raises Exception if more than one default project exists.
        """
        projects = super(ProjectManager, self).filter(is_default=True)

        if len(projects) > 1:
            raise Exception('Inconsistent project data: multiple ' +
                            'default projects (i.e. with is_default=True)')
        elif len(projects) < 1:
            options = {
                'name': 'Command line builds',
                'short_description':
                'Project for builds started outside Toaster',
                'is_default': True
            }
            # objects.create() already saves the new row; the original's
            # extra project.save() call was redundant
            return Project.objects.create(**options)
        else:
            return projects[0]
|
2014-06-30 14:58:36 +00:00
|
|
|
|
2014-06-03 15:26:17 +00:00
|
|
|
class Project(models.Model):
    search_allowed_fields = ['name', 'short_description', 'release__name',
                             'release__branch_name']
    name = models.CharField(max_length=100)
    short_description = models.CharField(max_length=50, blank=True)
    bitbake_version = models.ForeignKey('BitbakeVersion', null=True)
    release = models.ForeignKey("Release", null=True)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    # This is a horrible hack; since Toaster has no "User" model available
    # when running in interactive mode, we can't reference the field here
    # directly. Instead, we keep a possibly-null reference to the User id,
    # so as not to force hard links to possibly missing models.
    user_id = models.IntegerField(null=True)
    objects = ProjectManager()

    # set to True for the project which is the default container
    # for builds initiated by the command line etc.
    is_default = models.BooleanField(default=False)

    def __unicode__(self):
        return "%s (Release %s, BBV %s)" % (self.name, self.release,
                                            self.bitbake_version)

    def get_current_machine_name(self):
        """Return the project's MACHINE variable value, or None if unset."""
        try:
            return self.projectvariable_set.get(name="MACHINE").value
        except (ProjectVariable.DoesNotExist, IndexError):
            return None

    def get_number_of_builds(self):
        """Return the number of builds which have ended"""
        return self.build_set.exclude(
            Q(outcome=Build.IN_PROGRESS) |
            Q(outcome=Build.CANCELLED)
        ).count()

    def get_last_build_id(self):
        """Return the id of the most recently completed build of this
        project, or -1 if the project has no builds."""
        try:
            return Build.objects.filter(
                project=self.id).order_by('-completed_on')[0].id
        except (Build.DoesNotExist, IndexError):
            return -1

    def get_last_outcome(self):
        """Outcome of the last build; "" if there are no builds, or
        "not_found" if the build cannot be looked up."""
        # BUGFIX: the original assigned the *bound method*
        # (self.get_last_build_id, no parentheses), so the -1 guard never
        # fired and the method object was passed to filter(id=...)
        build_id = self.get_last_build_id()
        if build_id == -1:
            return ""
        try:
            return Build.objects.filter(id=build_id)[0].outcome
        except (Build.DoesNotExist, IndexError):
            return "not_found"

    def get_last_target(self):
        """Target name of the last build; "" / "not_found" as above."""
        build_id = self.get_last_build_id()
        if build_id == -1:
            return ""
        try:
            return Target.objects.filter(build=build_id)[0].target
        except (Target.DoesNotExist, IndexError):
            return "not_found"

    def get_last_errors(self):
        """Error count of the last build; 0 if there are no builds.
        NOTE: returns the string "not_found" (not an int) on lookup failure,
        preserving the original's mixed return type for existing callers."""
        build_id = self.get_last_build_id()
        if build_id == -1:
            return 0
        try:
            return Build.objects.filter(id=build_id)[0].errors.count()
        except (Build.DoesNotExist, IndexError):
            return "not_found"

    def get_last_warnings(self):
        """Warning count of the last build; same conventions as
        get_last_errors."""
        build_id = self.get_last_build_id()
        if build_id == -1:
            return 0
        try:
            return Build.objects.filter(id=build_id)[0].warnings.count()
        except (Build.DoesNotExist, IndexError):
            return "not_found"

    def get_last_build_extensions(self):
        """
        Get list of file name extensions for images produced by the most
        recent build
        """
        last_build = Build.objects.get(pk=self.get_last_build_id())
        return last_build.get_image_file_extensions()

    def get_last_imgfiles(self):
        """IMAGE_FSTYPES value of the last build; "" / "not_found" as
        above."""
        build_id = self.get_last_build_id()
        if build_id == -1:
            return ""
        try:
            return Variable.objects.filter(
                build=build_id,
                variable_name="IMAGE_FSTYPES")[0].variable_value
        except (Variable.DoesNotExist, IndexError):
            return "not_found"

    def get_all_compatible_layer_versions(self):
        """ Returns Queryset of all Layer_Versions which are compatible with
        this project"""
        # guard on release, as it can be null
        if self.release:
            queryset = Layer_Version.objects.filter(
                (Q(release=self.release) &
                 Q(build=None) &
                 Q(project=None)) |
                Q(project=self))
        else:
            queryset = Layer_Version.objects.none()

        return queryset

    def get_project_layer_versions(self, pk=False):
        """ Returns the Layer_Versions currently added to this project;
        a queryset of Layer_Version objects by default, or just their
        primary keys when pk is True """
        layer_versions = self.projectlayer_set.all().values_list(
            'layercommit', flat=True)

        if pk is False:
            return Layer_Version.objects.filter(pk__in=layer_versions)
        else:
            return layer_versions

    def get_available_machines(self):
        """ Returns QuerySet of all Machines which are provided by the
        Layers currently added to the Project """
        return Machine.objects.filter(
            layer_version__in=self.get_project_layer_versions())

    def get_all_compatible_machines(self):
        """ Returns QuerySet of all the compatible machines available to the
        project including ones from Layers not currently added """
        return Machine.objects.filter(
            layer_version__in=self.get_all_compatible_layer_versions())

    def get_available_recipes(self):
        """ Returns QuerySet of all the recipes that are provided by layers
        added to this project """
        return Recipe.objects.filter(
            layer_version__in=self.get_project_layer_versions())

    def get_all_compatible_recipes(self):
        """ Returns QuerySet of all the compatible Recipes available to the
        project including ones from Layers not currently added """
        return Recipe.objects.filter(
            layer_version__in=self.get_all_compatible_layer_versions()
        ).exclude(name__exact='')

    def schedule_build(self):
        """Create and queue a BuildRequest (plus a placeholder Build) for
        this project's current layers, targets and variables, and return it.

        The BuildRequest is deleted again if anything fails part-way, so a
        half-populated request is never left behind.
        """
        from bldcontrol.models import (BuildRequest, BRTarget, BRLayer,
                                       BRVariable, BRBitbake)
        br = BuildRequest.objects.create(project=self)
        try:
            BRBitbake.objects.create(req=br,
                                     giturl=self.bitbake_version.giturl,
                                     commit=self.bitbake_version.branch,
                                     dirpath=self.bitbake_version.dirpath)

            for l in self.projectlayer_set.all().order_by("pk"):
                commit = l.layercommit.get_vcs_reference()
                # was a stray debug print(); route through the module logger
                logger.debug("Building layer %s at vcs point %s",
                             l.layercommit.layer.name, commit)
                BRLayer.objects.create(
                    req=br,
                    name=l.layercommit.layer.name,
                    giturl=l.layercommit.layer.vcs_url,
                    commit=commit,
                    dirpath=l.layercommit.dirpath,
                    layer_version=l.layercommit,
                    local_source_dir=l.layercommit.layer.local_source_dir
                )

            br.state = BuildRequest.REQ_QUEUED
            now = timezone.now()
            # placeholder Build row; started_on/completed_on are updated as
            # the build actually runs
            br.build = Build.objects.create(project=self,
                                            completed_on=now,
                                            started_on=now)
            for t in self.projecttarget_set.all():
                BRTarget.objects.create(req=br, target=t.target, task=t.task)
                Target.objects.create(build=br.build, target=t.target,
                                      task=t.task)

            for v in self.projectvariable_set.all():
                BRVariable.objects.create(req=br, name=v.name, value=v.value)

            try:
                br.build.machine = self.projectvariable_set.get(
                    name='MACHINE').value
                br.build.save()
            except ProjectVariable.DoesNotExist:
                # MACHINE simply not set for this project; leave blank
                pass
            br.save()
        except Exception:
            # revert the build request creation since we're not done cleanly
            br.delete()
            raise
        return br
|
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class Build(models.Model):
    # build outcome states
    SUCCEEDED = 0
    FAILED = 1
    IN_PROGRESS = 2
    CANCELLED = 3

    # (value, label) choices for the outcome field; note that each entry's
    # tuple index equals its value, which get_outcome_text relies on
    BUILD_OUTCOME = (
        (SUCCEEDED, 'Succeeded'),
        (FAILED, 'Failed'),
        (IN_PROGRESS, 'In Progress'),
        (CANCELLED, 'Cancelled'),
    )

    search_allowed_fields = ['machine', 'cooker_log_path', "target__target", "target__target_image_file__file_name"]

    project = models.ForeignKey(Project)            # must have a project
    machine = models.CharField(max_length=100)
    distro = models.CharField(max_length=100)
    distro_version = models.CharField(max_length=100)
    started_on = models.DateTimeField()
    completed_on = models.DateTimeField()
    outcome = models.IntegerField(choices=BUILD_OUTCOME, default=IN_PROGRESS)
    cooker_log_path = models.CharField(max_length=500)
    build_name = models.CharField(max_length=100, default='')
    bitbake_version = models.CharField(max_length=50)

    # number of recipes to parse for this build
    recipes_to_parse = models.IntegerField(default=1)

    # number of recipes parsed so far for this build
    recipes_parsed = models.IntegerField(default=0)
|
|
|
|
|
2016-01-18 14:23:55 +00:00
|
|
|
@staticmethod
|
|
|
|
def get_recent(project=None):
|
|
|
|
"""
|
|
|
|
Return recent builds as a list; if project is set, only return
|
|
|
|
builds for that project
|
|
|
|
"""
|
|
|
|
|
|
|
|
builds = Build.objects.all()
|
|
|
|
|
|
|
|
if project:
|
|
|
|
builds = builds.filter(project=project)
|
|
|
|
|
2016-04-06 16:46:34 +00:00
|
|
|
finished_criteria = \
|
|
|
|
Q(outcome=Build.SUCCEEDED) | \
|
|
|
|
Q(outcome=Build.FAILED) | \
|
|
|
|
Q(outcome=Build.CANCELLED)
|
2016-01-18 14:23:55 +00:00
|
|
|
|
|
|
|
recent_builds = list(itertools.chain(
|
|
|
|
builds.filter(outcome=Build.IN_PROGRESS).order_by("-started_on"),
|
|
|
|
builds.filter(finished_criteria).order_by("-completed_on")[:3]
|
|
|
|
))
|
|
|
|
|
|
|
|
# add percentage done property to each build; this is used
|
|
|
|
# to show build progress in mrb_section.html
|
|
|
|
for build in recent_builds:
|
|
|
|
build.percentDone = build.completeper()
|
2016-05-13 16:02:58 +00:00
|
|
|
build.outcomeText = build.get_outcome_text()
|
2016-01-18 14:23:55 +00:00
|
|
|
|
|
|
|
return recent_builds
|
|
|
|
|
2016-07-13 13:39:47 +00:00
|
|
|
def started(self):
|
|
|
|
"""
|
|
|
|
As build variables are only added for a build when its BuildStarted event
|
|
|
|
is received, a build with no build variables is counted as
|
|
|
|
"in preparation" and not properly started yet. This method
|
|
|
|
will return False if a build has no build variables (it never properly
|
|
|
|
started), or True otherwise.
|
|
|
|
|
|
|
|
Note that this is a temporary workaround for the fact that we don't
|
|
|
|
have a fine-grained state variable on a build which would allow us
|
|
|
|
to record "in progress" (BuildStarted received) vs. "in preparation".
|
|
|
|
"""
|
|
|
|
variables = Variable.objects.filter(build=self)
|
|
|
|
return len(variables) > 0
|
|
|
|
|
2014-08-27 16:24:42 +00:00
|
|
|
def completeper(self):
|
|
|
|
tf = Task.objects.filter(build = self)
|
|
|
|
tfc = tf.count()
|
|
|
|
if tfc > 0:
|
2016-05-30 12:51:20 +00:00
|
|
|
completeper = tf.exclude(order__isnull=True).count()*100 // tfc
|
2014-08-27 16:24:42 +00:00
|
|
|
else:
|
|
|
|
completeper = 0
|
|
|
|
return completeper
|
|
|
|
|
|
|
|
def eta(self):
|
2014-09-09 10:47:13 +00:00
|
|
|
eta = timezone.now()
|
2014-08-27 16:24:42 +00:00
|
|
|
completeper = self.completeper()
|
|
|
|
if self.completeper() > 0:
|
2015-02-24 17:21:00 +00:00
|
|
|
eta += ((eta - self.started_on)*(100-completeper))/completeper
|
2014-08-27 16:24:42 +00:00
|
|
|
return eta
|
|
|
|
|
2016-07-12 22:54:50 +00:00
|
|
|
def has_images(self):
|
|
|
|
"""
|
|
|
|
Returns True if at least one of the targets for this build has an
|
|
|
|
image file associated with it, False otherwise
|
|
|
|
"""
|
|
|
|
targets = Target.objects.filter(build_id=self.id)
|
|
|
|
has_images = False
|
|
|
|
for target in targets:
|
|
|
|
if target.has_images():
|
|
|
|
has_images = True
|
|
|
|
break
|
|
|
|
return has_images
|
|
|
|
|
2016-07-12 22:54:55 +00:00
|
|
|
def has_image_recipes(self):
|
2016-07-12 22:54:51 +00:00
|
|
|
"""
|
|
|
|
Returns True if a build has any targets which were built from
|
|
|
|
image recipes.
|
|
|
|
"""
|
2016-07-12 22:54:55 +00:00
|
|
|
image_recipes = self.get_image_recipes()
|
|
|
|
return len(image_recipes) > 0
|
2016-07-12 22:54:51 +00:00
|
|
|
|
2016-01-15 11:00:46 +00:00
|
|
|
def get_image_file_extensions(self):
|
|
|
|
"""
|
2016-07-12 22:54:50 +00:00
|
|
|
Get string of file name extensions for images produced by this build;
|
2016-07-12 22:54:46 +00:00
|
|
|
note that this is the actual list of extensions stored on Target objects
|
|
|
|
for this build, and not the value of IMAGE_FSTYPES.
|
2016-07-12 22:54:50 +00:00
|
|
|
|
|
|
|
Returns comma-separated string, e.g. "vmdk, ext4"
|
2016-01-15 11:00:46 +00:00
|
|
|
"""
|
|
|
|
extensions = []
|
|
|
|
|
2016-07-12 22:54:45 +00:00
|
|
|
targets = Target.objects.filter(build_id = self.id)
|
2016-01-15 11:00:46 +00:00
|
|
|
for target in targets:
|
2016-07-12 22:54:50 +00:00
|
|
|
if not target.is_image:
|
2016-01-15 11:00:46 +00:00
|
|
|
continue
|
|
|
|
|
2016-07-12 22:54:45 +00:00
|
|
|
target_image_files = Target_Image_File.objects.filter(
|
|
|
|
target_id=target.id)
|
2016-01-15 11:00:46 +00:00
|
|
|
|
|
|
|
for target_image_file in target_image_files:
|
2016-07-12 22:54:45 +00:00
|
|
|
extensions.append(target_image_file.suffix)
|
|
|
|
|
|
|
|
extensions = list(set(extensions))
|
|
|
|
extensions.sort()
|
2016-01-15 11:00:46 +00:00
|
|
|
|
|
|
|
return ', '.join(extensions)
|
2014-08-27 16:24:42 +00:00
|
|
|
|
2016-07-12 22:54:46 +00:00
|
|
|
def get_image_fstypes(self):
|
|
|
|
"""
|
|
|
|
Get the IMAGE_FSTYPES variable value for this build as a de-duplicated
|
|
|
|
list of image file suffixes.
|
|
|
|
"""
|
|
|
|
image_fstypes = Variable.objects.get(
|
|
|
|
build=self, variable_name='IMAGE_FSTYPES').variable_value
|
|
|
|
return list(set(re.split(r' {1,}', image_fstypes)))
|
|
|
|
|
2014-03-21 12:35:50 +00:00
|
|
|
def get_sorted_target_list(self):
|
|
|
|
tgts = Target.objects.filter(build_id = self.id).order_by( 'target' );
|
|
|
|
return( tgts );
|
|
|
|
|
2016-04-19 16:28:45 +00:00
|
|
|
def get_recipes(self):
|
|
|
|
"""
|
|
|
|
Get the recipes related to this build;
|
|
|
|
note that the related layer versions and layers are also prefetched
|
|
|
|
by this query, as this queryset can be sorted by these objects in the
|
|
|
|
build recipes view; prefetching them here removes the need
|
|
|
|
for another query in that view
|
|
|
|
"""
|
|
|
|
layer_versions = Layer_Version.objects.filter(build=self)
|
|
|
|
criteria = Q(layer_version__id__in=layer_versions)
|
|
|
|
return Recipe.objects.filter(criteria) \
|
|
|
|
.select_related('layer_version', 'layer_version__layer')
|
|
|
|
|
|
|
|
def get_image_recipes(self):
|
|
|
|
"""
|
2016-04-19 16:28:46 +00:00
|
|
|
Returns a list of image Recipes (custom and built-in) related to this
|
|
|
|
build, sorted by name; note that this has to be done in two steps, as
|
|
|
|
there's no way to get all the custom image recipes and image recipes
|
|
|
|
in one query
|
2016-04-19 16:28:45 +00:00
|
|
|
"""
|
2016-04-19 16:28:46 +00:00
|
|
|
custom_image_recipes = self.get_custom_image_recipes()
|
|
|
|
custom_image_recipe_names = custom_image_recipes.values_list('name', flat=True)
|
|
|
|
|
|
|
|
not_custom_image_recipes = ~Q(name__in=custom_image_recipe_names) & \
|
|
|
|
Q(is_image=True)
|
|
|
|
|
|
|
|
built_image_recipes = self.get_recipes().filter(not_custom_image_recipes)
|
|
|
|
|
|
|
|
# append to the custom image recipes and sort
|
|
|
|
customisable_image_recipes = list(
|
|
|
|
itertools.chain(custom_image_recipes, built_image_recipes)
|
|
|
|
)
|
|
|
|
|
|
|
|
return sorted(customisable_image_recipes, key=lambda recipe: recipe.name)
|
2016-04-19 16:28:45 +00:00
|
|
|
|
|
|
|
def get_custom_image_recipes(self):
|
|
|
|
"""
|
2016-04-19 16:28:46 +00:00
|
|
|
Returns a queryset of CustomImageRecipes related to this build,
|
2016-04-19 16:28:45 +00:00
|
|
|
sorted by name
|
|
|
|
"""
|
2016-04-19 16:28:46 +00:00
|
|
|
built_recipe_names = self.get_recipes().values_list('name', flat=True)
|
|
|
|
criteria = Q(name__in=built_recipe_names) & Q(project=self.project)
|
|
|
|
queryset = CustomImageRecipe.objects.filter(criteria).order_by('name')
|
|
|
|
return queryset
|
2016-04-19 16:28:45 +00:00
|
|
|
|
2015-10-14 14:43:44 +00:00
|
|
|
def get_outcome_text(self):
|
|
|
|
return Build.BUILD_OUTCOME[int(self.outcome)][1]
|
|
|
|
|
2016-01-15 11:00:49 +00:00
|
|
|
    @property
    def failed_tasks(self):
        """ Get failed tasks for the build """
        # only tasks with a real execution order (order > 0) are considered
        tasks = self.task_build.all()
        return tasks.filter(order__gt=0, outcome=Task.OUTCOME_FAILED)
|
|
|
|
|
2015-06-17 16:30:34 +00:00
|
|
|
@property
|
|
|
|
def errors(self):
|
2015-10-15 12:45:13 +00:00
|
|
|
return (self.logmessage_set.filter(level=LogMessage.ERROR) |
|
|
|
|
self.logmessage_set.filter(level=LogMessage.EXCEPTION) |
|
|
|
|
self.logmessage_set.filter(level=LogMessage.CRITICAL))
|
2015-06-17 16:30:34 +00:00
|
|
|
|
|
|
|
    @property
    def warnings(self):
        # LogMessages of this build at WARNING level
        return self.logmessage_set.filter(level=LogMessage.WARNING)
|
|
|
|
|
2016-01-15 11:00:49 +00:00
|
|
|
    @property
    def timespent(self):
        # wall-clock duration of the build, as a timedelta
        return self.completed_on - self.started_on
|
|
|
|
|
2015-06-17 16:30:34 +00:00
|
|
|
    @property
    def timespent_seconds(self):
        # build duration in seconds (float), derived from timespent
        return self.timespent.total_seconds()
|
|
|
|
|
|
|
|
@property
|
|
|
|
def target_labels(self):
|
|
|
|
"""
|
|
|
|
Sorted (a-z) "target1:task, target2, target3" etc. string for all
|
|
|
|
targets in this build
|
|
|
|
"""
|
|
|
|
targets = self.target_set.all()
|
2016-01-15 11:01:06 +00:00
|
|
|
target_labels = [target.target +
|
|
|
|
(':' + target.task if target.task else '')
|
|
|
|
for target in targets]
|
2016-01-15 11:00:49 +00:00
|
|
|
target_labels.sort()
|
|
|
|
|
|
|
|
return target_labels
|
2015-06-11 17:27:53 +00:00
|
|
|
|
bitbake: toaster: move most recent builds templating to client
The most recent builds area of the all builds and project builds
table needs to update as a build progresses. It also needs
additional functionality to show other states (e.g. recipe parsing,
queued) which again needs to update on the client side.
Rather than add to the existing mix of server-side templating
with client-side DOM updating, translate all of the server-side
templates to client-side ones (jsrender), and add logic which
updates the most recent builds area as the state of a build changes.
Add a JSON API for mostrecentbuilds, which returns the state of
all "recent" builds. Fetch this via Ajax from the build dashboard
(rather than fetching the ad hoc API as in the previous version).
Then, as new states for builds are fetched via Ajax, determine
whether the build state has changed completely, or whether the progress
has just updated. If the state completely changed, re-render the
template on the client side for that build. If only the progress
changed, just update the progress bar. (NB this fixes the
task progress bar so it works for the project builds and all builds
pages.)
In cases where the builds table needs to update as the result of
a build finishing, reload the whole page.
This work highlighted a variety of other issues, such as
build requests not being able to change state as necessary. This
was one part of the cause of the "cancelling build..." state
being fragile and disappearing entirely when the page refreshed.
The cancelling state now persists between page reloads, as the
logic for determining whether a build is cancelling is now on
the Build object itself.
Note that jsrender is redistributed as part of Toaster, so
a note was added to LICENSE to that effect.
[YOCTO #9631]
(Bitbake rev: c868ea036aa34b387a72ec5116a66b2cd863995b)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-06-29 14:41:56 +00:00
|
|
|
def get_buildrequest(self):
|
|
|
|
buildrequest = None
|
|
|
|
if hasattr(self, 'buildrequest'):
|
|
|
|
buildrequest = self.buildrequest
|
|
|
|
return buildrequest
|
2015-10-14 14:43:44 +00:00
|
|
|
|
bitbake: toaster: move most recent builds templating to client
The most recent builds area of the all builds and project builds
table needs to update as a build progresses. It also needs
additional functionality to show other states (e.g. recipe parsing,
queued) which again needs to update on the client side.
Rather than add to the existing mix of server-side templating
with client-side DOM updating, translate all of the server-side
templates to client-side ones (jsrender), and add logic which
updates the most recent builds area as the state of a build changes.
Add a JSON API for mostrecentbuilds, which returns the state of
all "recent" builds. Fetch this via Ajax from the build dashboard
(rather than fetching the ad hoc API as in the previous version).
Then, as new states for builds are fetched via Ajax, determine
whether the build state has changed completely, or whether the progress
has just updated. If the state completely changed, re-render the
template on the client side for that build. If only the progress
changed, just update the progress bar. (NB this fixes the
task progress bar so it works for the project builds and all builds
pages.)
In cases where the builds table needs to update as the result of
a build finishing, reload the whole page.
This work highlighted a variety of other issues, such as
build requests not being able to change state as necessary. This
was one part of the cause of the "cancelling build..." state
being fragile and disappearing entirely when the page refreshed.
The cancelling state now persists between page reloads, as the
logic for determining whether a build is cancelling is now on
the Build object itself.
Note that jsrender is redistributed as part of Toaster, so
a note was added to LICENSE to that effect.
[YOCTO #9631]
(Bitbake rev: c868ea036aa34b387a72ec5116a66b2cd863995b)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-06-29 14:41:56 +00:00
|
|
|
def is_queued(self):
|
2015-06-17 11:27:48 +00:00
|
|
|
from bldcontrol.models import BuildRequest
|
bitbake: toaster: move most recent builds templating to client
The most recent builds area of the all builds and project builds
table needs to update as a build progresses. It also needs
additional functionality to show other states (e.g. recipe parsing,
queued) which again needs to update on the client side.
Rather than add to the existing mix of server-side templating
with client-side DOM updating, translate all of the server-side
templates to client-side ones (jsrender), and add logic which
updates the most recent builds area as the state of a build changes.
Add a JSON API for mostrecentbuilds, which returns the state of
all "recent" builds. Fetch this via Ajax from the build dashboard
(rather than fetching the ad hoc API as in the previous version).
Then, as new states for builds are fetched via Ajax, determine
whether the build state has changed completely, or whether the progress
has just updated. If the state completely changed, re-render the
template on the client side for that build. If only the progress
changed, just update the progress bar. (NB this fixes the
task progress bar so it works for the project builds and all builds
pages.)
In cases where the builds table needs to update as the result of
a build finishing, reload the whole page.
This work highlighted a variety of other issues, such as
build requests not being able to change state as necessary. This
was one part of the cause of the "cancelling build..." state
being fragile and disappearing entirely when the page refreshed.
The cancelling state now persists between page reloads, as the
logic for determining whether a build is cancelling is now on
the Build object itself.
Note that jsrender is redistributed as part of Toaster, so
a note was added to LICENSE to that effect.
[YOCTO #9631]
(Bitbake rev: c868ea036aa34b387a72ec5116a66b2cd863995b)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-06-29 14:41:56 +00:00
|
|
|
buildrequest = self.get_buildrequest()
|
|
|
|
if buildrequest:
|
|
|
|
return buildrequest.state == BuildRequest.REQ_QUEUED
|
|
|
|
else:
|
|
|
|
return False
|
2015-10-14 14:43:44 +00:00
|
|
|
|
bitbake: toaster: move most recent builds templating to client
The most recent builds area of the all builds and project builds
table needs to update as a build progresses. It also needs
additional functionality to show other states (e.g. recipe parsing,
queued) which again needs to update on the client side.
Rather than add to the existing mix of server-side templating
with client-side DOM updating, translate all of the server-side
templates to client-side ones (jsrender), and add logic which
updates the most recent builds area as the state of a build changes.
Add a JSON API for mostrecentbuilds, which returns the state of
all "recent" builds. Fetch this via Ajax from the build dashboard
(rather than fetching the ad hoc API as in the previous version).
Then, as new states for builds are fetched via Ajax, determine
whether the build state has changed completely, or whether the progress
has just updated. If the state completely changed, re-render the
template on the client side for that build. If only the progress
changed, just update the progress bar. (NB this fixes the
task progress bar so it works for the project builds and all builds
pages.)
In cases where the builds table needs to update as the result of
a build finishing, reload the whole page.
This work highlighted a variety of other issues, such as
build requests not being able to change state as necessary. This
was one part of the cause of the "cancelling build..." state
being fragile and disappearing entirely when the page refreshed.
The cancelling state now persists between page reloads, as the
logic for determining whether a build is cancelling is now on
the Build object itself.
Note that jsrender is redistributed as part of Toaster, so
a note was added to LICENSE to that effect.
[YOCTO #9631]
(Bitbake rev: c868ea036aa34b387a72ec5116a66b2cd863995b)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-06-29 14:41:56 +00:00
|
|
|
def is_cancelling(self):
|
|
|
|
from bldcontrol.models import BuildRequest
|
|
|
|
buildrequest = self.get_buildrequest()
|
|
|
|
if buildrequest:
|
|
|
|
return self.outcome == Build.IN_PROGRESS and \
|
|
|
|
buildrequest.state == BuildRequest.REQ_CANCELLING
|
|
|
|
else:
|
|
|
|
return False
|
2015-10-14 14:43:44 +00:00
|
|
|
|
2016-07-11 13:47:06 +00:00
|
|
|
def is_parsing(self):
|
|
|
|
"""
|
|
|
|
True if the build is still parsing recipes
|
|
|
|
"""
|
|
|
|
return self.outcome == Build.IN_PROGRESS and \
|
|
|
|
self.recipes_parsed < self.recipes_to_parse
|
|
|
|
|
2016-07-12 15:14:42 +00:00
|
|
|
def is_starting(self):
|
|
|
|
"""
|
|
|
|
True if the build has no completed tasks yet and is still just starting
|
|
|
|
tasks.
|
|
|
|
|
|
|
|
Note that the mechanism for testing whether a Task is "done" is whether
|
|
|
|
its order field is set, as per the completeper() method.
|
|
|
|
"""
|
|
|
|
return self.outcome == Build.IN_PROGRESS and \
|
|
|
|
self.task_build.filter(order__isnull=False).count() == 0
|
|
|
|
|
bitbake: toaster: move most recent builds templating to client
The most recent builds area of the all builds and project builds
table needs to update as a build progresses. It also needs
additional functionality to show other states (e.g. recipe parsing,
queued) which again needs to update on the client side.
Rather than add to the existing mix of server-side templating
with client-side DOM updating, translate all of the server-side
templates to client-side ones (jsrender), and add logic which
updates the most recent builds area as the state of a build changes.
Add a JSON API for mostrecentbuilds, which returns the state of
all "recent" builds. Fetch this via Ajax from the build dashboard
(rather than fetching the ad hoc API as in the previous version).
Then, as new states for builds are fetched via Ajax, determine
whether the build state has changed completely, or whether the progress
has just updated. If the state completely changed, re-render the
template on the client side for that build. If only the progress
changed, just update the progress bar. (NB this fixes the
task progress bar so it works for the project builds and all builds
pages.)
In cases where the builds table needs to update as the result of
a build finishing, reload the whole page.
This work highlighted a variety of other issues, such as
build requests not being able to change state as necessary. This
was one part of the cause of the "cancelling build..." state
being fragile and disappearing entirely when the page refreshed.
The cancelling state now persists between page reloads, as the
logic for determining whether a build is cancelling is now on
the Build object itself.
Note that jsrender is redistributed as part of Toaster, so
a note was added to LICENSE to that effect.
[YOCTO #9631]
(Bitbake rev: c868ea036aa34b387a72ec5116a66b2cd863995b)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-06-29 14:41:56 +00:00
|
|
|
def get_state(self):
|
|
|
|
"""
|
|
|
|
Get the state of the build; one of 'Succeeded', 'Failed', 'In Progress',
|
|
|
|
'Cancelled' (Build outcomes); or 'Queued', 'Cancelling' (states
|
|
|
|
dependent on the BuildRequest state).
|
|
|
|
|
|
|
|
This works around the fact that we have BuildRequest states as well
|
|
|
|
as Build states, but really we just want to know the state of the build.
|
|
|
|
"""
|
|
|
|
if self.is_cancelling():
|
|
|
|
return 'Cancelling';
|
|
|
|
elif self.is_queued():
|
|
|
|
return 'Queued'
|
2016-07-11 13:47:06 +00:00
|
|
|
elif self.is_parsing():
|
|
|
|
return 'Parsing'
|
2016-07-12 15:14:42 +00:00
|
|
|
elif self.is_starting():
|
|
|
|
return 'Starting'
|
2015-10-14 14:43:44 +00:00
|
|
|
else:
|
|
|
|
return self.get_outcome_text()
|
2015-06-11 17:27:53 +00:00
|
|
|
|
2015-05-19 12:13:27 +00:00
|
|
|
def __str__(self):
|
2016-01-15 11:00:46 +00:00
|
|
|
return "%d %s %s" % (self.id, self.project, ",".join([t.target for t in self.target_set.all()]))
|
2015-05-19 12:13:27 +00:00
|
|
|
|
2014-06-03 15:26:17 +00:00
|
|
|
class ProjectTarget(models.Model):
    """A target (with optional task) configured for a Project."""

    project = models.ForeignKey(Project)

    # target name, e.g. 'core-image-minimal' (cf. Target.target)
    target = models.CharField(max_length=100)

    # task to run for this target; null when no specific task was set
    task = models.CharField(max_length=100, null=True)
|
2014-03-21 12:35:50 +00:00
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class Target(models.Model):
    # fields the table search UI is allowed to match against
    search_allowed_fields = ['target', 'file_name']

    # the build which produced (or attempted to produce) this target
    build = models.ForeignKey(Build)

    # name of the target, e.g. 'core-image-minimal'
    target = models.CharField(max_length=100)

    # task requested for this target; may be null (cf. ProjectTarget.task)
    task = models.CharField(max_length=100, null=True)

    # whether this target produced an image
    is_image = models.BooleanField(default = False)

    # size of the produced image; 0 when there is none
    # (units not shown here — presumably bytes; confirm against the setter)
    image_size = models.IntegerField(default=0)

    # paths to the license/package manifest artifacts; may be copied from a
    # similar target by clone_image_artifacts_from()
    license_manifest_path = models.CharField(max_length=500, null=True)
    package_manifest_path = models.CharField(max_length=500, null=True)
|
2013-10-11 12:46:23 +00:00
|
|
|
|
2014-03-12 21:54:09 +00:00
|
|
|
def package_count(self):
|
|
|
|
return Target_Installed_Package.objects.filter(target_id__exact=self.id).count()
|
|
|
|
|
2014-11-14 17:07:06 +00:00
|
|
|
def __unicode__(self):
|
2013-10-11 12:46:23 +00:00
|
|
|
return self.target
|
|
|
|
|
2016-07-12 22:54:46 +00:00
|
|
|
    def get_similar_targets(self):
        """
        Get targets for the same machine, task and target name
        (e.g. 'core-image-minimal') from a successful build for this project
        (but excluding this target).

        Note that we only look for targets built by this project because
        projects can have different configurations from each other, and put
        their artifacts in different directories.

        The possibility of error when retrieving candidate targets
        is minimised by the fact that bitbake will rebuild artifacts if MACHINE
        (or various other variables) change. In this case, there is no need to
        clone artifacts from another target, as those artifacts will have
        been re-generated for this target anyway.
        """
        # same name/machine/project, successful build, not this target itself
        query = ~Q(pk=self.pk) & \
            Q(target=self.target) & \
            Q(build__machine=self.build.machine) & \
            Q(build__outcome=Build.SUCCEEDED) & \
            Q(build__project=self.build.project)

        return Target.objects.filter(query)
|
|
|
|
|
|
|
|
def get_similar_target_with_image_files(self):
|
|
|
|
"""
|
|
|
|
Get the most recent similar target with Target_Image_Files associated
|
|
|
|
with it, for the purpose of cloning those files onto this target.
|
|
|
|
"""
|
|
|
|
similar_target = None
|
|
|
|
|
|
|
|
candidates = self.get_similar_targets()
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
if candidates.count() == 0:
|
2016-07-12 22:54:46 +00:00
|
|
|
return similar_target
|
|
|
|
|
|
|
|
task_subquery = Q(task=self.task)
|
|
|
|
|
|
|
|
# we can look for a 'build' task if this task is a 'populate_sdk_ext'
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
# task, as the latter also creates images; and vice versa; note that
|
2016-07-12 22:54:46 +00:00
|
|
|
# 'build' targets can have their task set to '';
|
|
|
|
# also note that 'populate_sdk' does not produce image files
|
|
|
|
image_tasks = [
|
|
|
|
'', # aka 'build'
|
|
|
|
'build',
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
'image',
|
2016-07-12 22:54:46 +00:00
|
|
|
'populate_sdk_ext'
|
|
|
|
]
|
|
|
|
if self.task in image_tasks:
|
|
|
|
task_subquery = Q(task__in=image_tasks)
|
|
|
|
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
# annotate with the count of files, to exclude any targets which
|
|
|
|
# don't have associated files
|
|
|
|
candidates = candidates.annotate(num_files=Count('target_image_file'))
|
|
|
|
|
2016-07-12 22:54:46 +00:00
|
|
|
query = task_subquery & Q(num_files__gt=0)
|
|
|
|
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
candidates = candidates.filter(query)
|
|
|
|
|
|
|
|
if candidates.count() > 0:
|
|
|
|
candidates.order_by('build__completed_on')
|
|
|
|
similar_target = candidates.last()
|
|
|
|
|
|
|
|
return similar_target
|
|
|
|
|
|
|
|
def get_similar_target_with_sdk_files(self):
|
|
|
|
"""
|
|
|
|
Get the most recent similar target with TargetSDKFiles associated
|
|
|
|
with it, for the purpose of cloning those files onto this target.
|
|
|
|
"""
|
|
|
|
similar_target = None
|
|
|
|
|
|
|
|
candidates = self.get_similar_targets()
|
|
|
|
if candidates.count() == 0:
|
|
|
|
return similar_target
|
|
|
|
|
2016-07-12 22:54:46 +00:00
|
|
|
# annotate with the count of files, to exclude any targets which
|
|
|
|
# don't have associated files
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
candidates = candidates.annotate(num_files=Count('targetsdkfile'))
|
|
|
|
|
|
|
|
query = Q(task=self.task) & Q(num_files__gt=0)
|
2016-07-12 22:54:46 +00:00
|
|
|
|
|
|
|
candidates = candidates.filter(query)
|
|
|
|
|
|
|
|
if candidates.count() > 0:
|
|
|
|
candidates.order_by('build__completed_on')
|
|
|
|
similar_target = candidates.last()
|
|
|
|
|
|
|
|
return similar_target
|
|
|
|
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really built them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
def clone_image_artifacts_from(self, target):
|
2016-07-12 22:54:46 +00:00
|
|
|
"""
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
Make clones of the Target_Image_Files and TargetKernelFile objects
|
|
|
|
associated with Target target, then associate them with this target.
|
2016-07-12 22:54:46 +00:00
|
|
|
|
|
|
|
Note that for Target_Image_Files, we only want files from the previous
|
|
|
|
build whose suffix matches one of the suffixes defined in this
|
|
|
|
target's build's IMAGE_FSTYPES configuration variable. This prevents the
|
|
|
|
Target_Image_File object for an ext4 image being associated with a
|
|
|
|
target for a project which didn't produce an ext4 image (for example).
|
|
|
|
|
2016-07-12 22:54:54 +00:00
|
|
|
Also sets the license_manifest_path and package_manifest_path
|
|
|
|
of this target to the same path as that of target being cloned from, as
|
|
|
|
the manifests are also build artifacts but are treated differently.
|
2016-07-12 22:54:46 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
image_fstypes = self.build.get_image_fstypes()
|
|
|
|
|
|
|
|
# filter out any image files whose suffixes aren't in the
|
|
|
|
# IMAGE_FSTYPES suffixes variable for this target's build
|
|
|
|
image_files = [target_image_file \
|
|
|
|
for target_image_file in target.target_image_file_set.all() \
|
|
|
|
if target_image_file.suffix in image_fstypes]
|
|
|
|
|
|
|
|
for image_file in image_files:
|
|
|
|
image_file.pk = None
|
|
|
|
image_file.target = self
|
|
|
|
image_file.save()
|
|
|
|
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
kernel_files = target.targetkernelfile_set.all()
|
|
|
|
for kernel_file in kernel_files:
|
|
|
|
kernel_file.pk = None
|
|
|
|
kernel_file.target = self
|
|
|
|
kernel_file.save()
|
2016-07-12 22:54:46 +00:00
|
|
|
|
|
|
|
self.license_manifest_path = target.license_manifest_path
|
2016-07-12 22:54:54 +00:00
|
|
|
self.package_manifest_path = target.package_manifest_path
|
2016-07-12 22:54:46 +00:00
|
|
|
self.save()
|
|
|
|
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really build them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
def clone_sdk_artifacts_from(self, target):
    """
    Clone TargetSDKFile objects from target and associate them with this
    target.
    """
    # Clearing the pk before save() makes Django INSERT a brand-new row,
    # so the source target keeps its own records untouched.
    for sdk_artifact in target.targetsdkfile_set.all():
        sdk_artifact.pk = None
        sdk_artifact.target = self
        sdk_artifact.save()
|
|
|
|
|
2016-07-12 22:54:50 +00:00
|
|
|
def has_images(self):
    """
    Returns True if this target has one or more image files attached to it.
    """
    # exists() lets the database answer with a cheap "LIMIT 1" query
    # instead of the full COUNT(*) that count() > 0 would issue.
    return self.target_image_file_set.exists()
|
|
|
|
|
bitbake: toaster: improve scan for SDK artifacts
SDK artifacts were previously picked up by toaster.bbclass and
notified to buildinfohelper (via toasterui). The artifacts
were then added to the Build object, so that it wasn't clear
which artifact went with which target; we were also unable
to attach SDK artifacts to a Build if they had already been
attached to a previous build.
Now, toaster.bbclass just notifies the TOOLCHAIN_OUTPUTNAME when
a populate_sdk* target completes. The scan is moved to buildinfohelper,
where we search the SDK deploy directory for files matching
TOOLCHAIN_OUTPUTNAME and attach them to targets (not builds).
If an SDK file is not produced by a target, we now look for a
similar, previously-run target which did produce artifacts.
If there is one, we clone the SDK artifacts from that target
onto the current one.
This all means that we can show SDK artifacts by target, and should
always get artifacts associated with a target, regardless of whether
it really built them.
This requires an additional model, TargetSDKFile, which tracks
the size and path of SDK artifact files with respect to Target
objects.
[YOCTO #8556]
(Bitbake rev: 5e650c611605507e1e0d1588cd5eb6535c2d34fc)
Signed-off-by: Elliot Smith <elliot.smith@intel.com>
Signed-off-by: bavery <brian.avery@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2016-07-12 22:54:48 +00:00
|
|
|
# kernel artifacts for a target: bzImage and modules*
|
|
|
|
class TargetKernelFile(models.Model):
    """A kernel artifact (e.g. bzImage, modules tarball) produced for a Target."""

    # target this kernel artifact was built for
    target = models.ForeignKey(Target)
    # full path of the artifact on disk
    file_name = models.FilePathField()
    # size of the artifact in bytes
    file_size = models.IntegerField()

    @property
    def basename(self):
        # file name without its directory part, for display
        return os.path.basename(self.file_name)
|
|
|
|
|
|
|
|
# SDK artifacts for a target: sh and manifest files
|
|
|
|
class TargetSDKFile(models.Model):
    """An SDK artifact (installer .sh or manifest file) produced for a Target."""

    # target this SDK artifact was built for
    target = models.ForeignKey(Target)
    # full path of the artifact on disk
    file_name = models.FilePathField()
    # size of the artifact in bytes
    file_size = models.IntegerField()

    @property
    def basename(self):
        # file name without its directory part, for display
        return os.path.basename(self.file_name)
|
|
|
|
|
2014-02-20 12:47:55 +00:00
|
|
|
class Target_Image_File(models.Model):
    """An image file (e.g. rootfs .ext4, .tar.bz2) produced by a build for a Target."""

    # valid suffixes for image files produced by a build
    SUFFIXES = {
        'btrfs', 'cpio', 'cpio.gz', 'cpio.lz4', 'cpio.lzma', 'cpio.xz',
        'cramfs', 'elf', 'ext2', 'ext2.bz2', 'ext2.gz', 'ext2.lzma', 'ext4',
        'ext4.gz', 'ext3', 'ext3.gz', 'hdddirect', 'hddimg', 'iso', 'jffs2',
        'jffs2.sum', 'multiubi', 'qcow2', 'squashfs', 'squashfs-lzo',
        'squashfs-xz', 'tar', 'tar.bz2', 'tar.gz', 'tar.lz4', 'tar.xz', 'ubi',
        'ubifs', 'vdi', 'vmdk', 'wic', 'wic.bz2', 'wic.gz', 'wic.lzma'
    }

    target = models.ForeignKey(Target)
    file_name = models.FilePathField(max_length=254)
    # size in bytes
    file_size = models.IntegerField()

    @property
    def suffix(self):
        """
        Suffix for image file, minus leading "."
        """
        # Check the longest known suffixes first: SUFFIXES is a set, so
        # plain iteration order is arbitrary, and overlapping entries
        # (e.g. "ubi" vs "multiubi") could otherwise match
        # nondeterministically.  Anchoring on a leading "." also prevents
        # matching inside a name component.
        for suffix in sorted(Target_Image_File.SUFFIXES,
                             key=len, reverse=True):
            if self.file_name.endswith('.' + suffix):
                return suffix

        # not a known suffix: fall back to whatever follows the final "."
        filename, suffix = os.path.splitext(self.file_name)
        suffix = suffix.lstrip('.')
        return suffix
|
|
|
|
|
2014-02-20 12:47:55 +00:00
|
|
|
class Target_File(models.Model):
    """A single filesystem entry (file, dir, symlink, device node, ...)
    found inside the image built for a Target."""

    # inode type constants, mirroring the usual Unix file types
    ITYPE_REGULAR = 1
    ITYPE_DIRECTORY = 2
    ITYPE_SYMLINK = 3
    ITYPE_SOCKET = 4
    ITYPE_FIFO = 5
    ITYPE_CHARACTER = 6
    ITYPE_BLOCK = 7
    ITYPES = ( (ITYPE_REGULAR ,'regular'),
        ( ITYPE_DIRECTORY ,'directory'),
        ( ITYPE_SYMLINK ,'symlink'),
        ( ITYPE_SOCKET ,'socket'),
        ( ITYPE_FIFO ,'fifo'),
        ( ITYPE_CHARACTER ,'character'),
        ( ITYPE_BLOCK ,'block'),
        )

    target = models.ForeignKey(Target)
    # absolute path of the entry inside the image
    path = models.FilePathField()
    # size in bytes
    size = models.IntegerField()
    inodetype = models.IntegerField(choices = ITYPES)
    # permission string, e.g. "rwxr-xr-x"
    permission = models.CharField(max_length=16)
    owner = models.CharField(max_length=128)
    group = models.CharField(max_length=128)
    # containing directory (self-referential); null for the root entry
    directory = models.ForeignKey('Target_File', related_name="directory_set", null=True)
    # for symlinks, the entry the link points at; null otherwise
    sym_target = models.ForeignKey('Target_File', related_name="symlink_set", null=True)
|
2013-10-11 12:46:23 +00:00
|
|
|
|
2014-01-14 12:35:12 +00:00
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class Task(models.Model):
    """A single bitbake task executed (or skipped) during a Build.

    Records the task's outcome, shared-state (sstate) result, timing and
    resource-usage statistics, and links back to its Recipe and Build.
    """

    # shared-state (sstate) restore results
    SSTATE_NA = 0
    SSTATE_MISS = 1
    SSTATE_FAILED = 2
    SSTATE_RESTORED = 3

    SSTATE_RESULT = (
        (SSTATE_NA, 'Not Applicable'), # For rest of tasks, but they still need checking.
        (SSTATE_MISS, 'File not in cache'), # the sstate object was not found
        (SSTATE_FAILED, 'Failed'), # there was a pkg, but the script failed
        (SSTATE_RESTORED, 'Succeeded'), # successfully restored
    )

    # language the task body is written in
    CODING_NA = 0
    CODING_PYTHON = 2
    CODING_SHELL = 3

    TASK_CODING = (
        (CODING_NA, 'N/A'),
        (CODING_PYTHON, 'Python'),
        (CODING_SHELL, 'Shell'),
    )

    # task outcomes; OUTCOME_NA is -1, so TASK_OUTCOME is indexed with a
    # +1 offset in get_outcome_text() below
    OUTCOME_NA = -1
    OUTCOME_SUCCESS = 0
    OUTCOME_COVERED = 1
    OUTCOME_CACHED = 2
    OUTCOME_PREBUILT = 3
    OUTCOME_FAILED = 4
    OUTCOME_EMPTY = 5

    TASK_OUTCOME = (
        (OUTCOME_NA, 'Not Available'),
        (OUTCOME_SUCCESS, 'Succeeded'),
        (OUTCOME_COVERED, 'Covered'),
        (OUTCOME_CACHED, 'Cached'),
        (OUTCOME_PREBUILT, 'Prebuilt'),
        (OUTCOME_FAILED, 'Failed'),
        (OUTCOME_EMPTY, 'Empty'),
    )

    # longer help strings per outcome, ordered so that the tuple index
    # equals the outcome value (OUTCOME_NA = -1 wraps to the last entry)
    TASK_OUTCOME_HELP = (
        (OUTCOME_SUCCESS, 'This task successfully completed'),
        (OUTCOME_COVERED, 'This task did not run because its output is provided by another task'),
        (OUTCOME_CACHED, 'This task restored output from the sstate-cache directory or mirrors'),
        (OUTCOME_PREBUILT, 'This task did not run because its outcome was reused from a previous build'),
        (OUTCOME_FAILED, 'This task did not complete'),
        (OUTCOME_EMPTY, 'This task has no executable content'),
        (OUTCOME_NA, ''),
    )

    # fields searchable from the Toaster UI
    search_allowed_fields = [ "recipe__name", "recipe__version", "task_name", "logfile" ]

    def __init__(self, *args, **kwargs):
        super(Task, self).__init__(*args, **kwargs)
        # cache any per-build help text for this task name so
        # get_description() does not hit the database on each call
        try:
            self._helptext = HelpText.objects.get(key=self.task_name, area=HelpText.VARIABLE, build=self.build).text
        except HelpText.DoesNotExist:
            self._helptext = None

    def get_related_setscene(self):
        # the executed "<task>_setscene" variant(s) of this task, if any
        return Task.objects.filter(task_executed=True, build = self.build, recipe = self.recipe, task_name=self.task_name+"_setscene")

    def get_outcome_text(self):
        # +1 offset because OUTCOME_NA is -1 and TASK_OUTCOME starts with it
        return Task.TASK_OUTCOME[int(self.outcome) + 1][1]

    def get_outcome_help(self):
        # TASK_OUTCOME_HELP is ordered so index == outcome value
        # (OUTCOME_NA = -1 selects the final, empty entry)
        return Task.TASK_OUTCOME_HELP[int(self.outcome)][1]

    def get_sstate_text(self):
        if self.sstate_result==Task.SSTATE_NA:
            # "Not Applicable" is shown as an empty cell in the UI
            return ''
        else:
            return Task.SSTATE_RESULT[int(self.sstate_result)][1]

    def get_executed_display(self):
        # human-readable form of the task_executed flag
        if self.task_executed:
            return "Executed"
        return "Not Executed"

    def get_description(self):
        # help text cached in __init__; None if no HelpText row exists
        return self._helptext

    build = models.ForeignKey(Build, related_name='task_build')
    # position of the task in the build's execution order
    order = models.IntegerField(null=True)
    task_executed = models.BooleanField(default=False) # True means Executed, False means Not/Executed
    outcome = models.IntegerField(choices=TASK_OUTCOME, default=OUTCOME_NA)
    sstate_checksum = models.CharField(max_length=100, blank=True)
    path_to_sstate_obj = models.FilePathField(max_length=500, blank=True)
    recipe = models.ForeignKey('Recipe', related_name='tasks')
    task_name = models.CharField(max_length=100)
    source_url = models.FilePathField(max_length=255, blank=True)
    work_directory = models.FilePathField(max_length=255, blank=True)
    script_type = models.IntegerField(choices=TASK_CODING, default=CODING_NA)
    line_number = models.IntegerField(default=0)

    # start/end times
    started = models.DateTimeField(null=True)
    ended = models.DateTimeField(null=True)

    # in seconds; this is stored to enable sorting
    elapsed_time = models.DecimalField(max_digits=8, decimal_places=2, null=True)

    # in bytes; note that disk_io is stored to enable sorting
    disk_io = models.IntegerField(null=True)
    disk_io_read = models.IntegerField(null=True)
    disk_io_write = models.IntegerField(null=True)

    # in seconds
    cpu_time_user = models.DecimalField(max_digits=8, decimal_places=2, null=True)
    cpu_time_system = models.DecimalField(max_digits=8, decimal_places=2, null=True)

    sstate_result = models.IntegerField(choices=SSTATE_RESULT, default=SSTATE_NA)
    message = models.CharField(max_length=240)
    logfile = models.FilePathField(max_length=255, blank=True)

    outcome_text = property(get_outcome_text)
    sstate_text = property(get_sstate_text)

    def __unicode__(self):
        return "%d(%d) %s:%s" % (self.pk, self.build.pk, self.recipe.name, self.task_name)

    class Meta:
        ordering = ('order', 'recipe' ,)
        unique_together = ('build', 'recipe', 'task_name', )
|
2013-10-11 12:46:23 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Task_Dependency(models.Model):
    """An edge in the task dependency graph: "task" depends on "depends_on"."""

    task = models.ForeignKey(Task, related_name='task_dependencies_task')
    depends_on = models.ForeignKey(Task, related_name='task_dependencies_depends')
|
|
|
|
|
2013-11-26 18:12:43 +00:00
|
|
|
class Package(models.Model):
    """A binary package known to (or produced by) a Build."""

    # fields searchable from the Toaster UI
    search_allowed_fields = ['name', 'version', 'revision', 'recipe__name', 'recipe__version', 'recipe__license', 'recipe__layer_version__layer__name', 'recipe__layer_version__branch', 'recipe__layer_version__commit', 'recipe__layer_version__local_path', 'installed_name']
    build = models.ForeignKey('Build', null=True)
    recipe = models.ForeignKey('Recipe', null=True)
    name = models.CharField(max_length=100)
    # name the package was installed under, if different from "name"
    installed_name = models.CharField(max_length=100, default='')
    version = models.CharField(max_length=100, blank=True)
    revision = models.CharField(max_length=32, blank=True)
    summary = models.TextField(blank=True)
    description = models.TextField(blank=True)
    # package file size in bytes
    size = models.IntegerField(default=0)
    # size when installed into an image, in bytes
    installed_size = models.IntegerField(default=0)
    section = models.CharField(max_length=80, blank=True)
    license = models.CharField(max_length=80, blank=True)

    @property
    def is_locale_package(self):
        """ Returns True if this package is identifiable as a locale package """
        # substring test is intentional: matches e.g. "glibc-locale"
        # as well as "locale-base-en-us"
        return 'locale' in self.name

    @property
    def is_packagegroup(self):
        """ Returns True if this package is identifiable as a packagegroup """
        return 'packagegroup' in self.name
|
|
|
|
|
2015-12-07 18:22:08 +00:00
|
|
|
class CustomImagePackage(Package):
    """Package specialization recording how a package relates to
    CustomImageRecipes (included, excluded or appended)."""

    # CustomImageRecipe fields to track packages appended,
    # included and excluded from a CustomImageRecipe
    recipe_includes = models.ManyToManyField('CustomImageRecipe',
                                             related_name='includes_set')
    recipe_excludes = models.ManyToManyField('CustomImageRecipe',
                                             related_name='excludes_set')
    recipe_appends = models.ManyToManyField('CustomImageRecipe',
                                            related_name='appends_set')
|
|
|
|
|
|
|
|
|
2014-03-19 12:24:47 +00:00
|
|
|
class Package_DependencyManager(models.Manager):
    """Manager for Package_Dependency rows which hides self-dependencies."""

    use_for_related_fields = True
    # sentinel "target" value meaning: use dependencies from any non-null target
    TARGET_LATEST = "use-latest-target-for-target"

    def get_queryset(self):
        # a package "depending" on itself is noise; exclude those rows
        return super(Package_DependencyManager, self).get_queryset().exclude(package_id = F('depends_on__id'))

    def for_target_or_none(self, target):
        """ filter the dependencies to be displayed by the supplied target
        if no dependences are found for the target then try None as the target
        which will return the dependences calculated without the context of a
        target e.g. non image recipes.

        returns: { size, packages }
        """
        package_dependencies = self.all_depends().order_by('depends_on__name')

        # compare by equality, not identity: "is" only works when the
        # caller passes the very same (interned) string object as the
        # TARGET_LATEST constant, which is not guaranteed
        if target == self.TARGET_LATEST:
            installed_deps =\
                    package_dependencies.filter(~Q(target__target=None))
        else:
            installed_deps =\
                    package_dependencies.filter(Q(target__target=target))

        packages_list = None
        total_size = 0

        # If we have installed dependencies for this package and target then
        # use these to display
        if installed_deps.count() > 0:
            packages_list = installed_deps
            total_size = installed_deps.aggregate(
                Sum('depends_on__size'))['depends_on__size__sum']
        else:
            new_list = []
            package_names = []

            # Find dependencies for the package that we know about even if
            # it's not installed on a target e.g. from a non-image recipe
            for p in package_dependencies.filter(Q(target=None)):
                if p.depends_on.name in package_names:
                    continue
                else:
                    package_names.append(p.depends_on.name)
                    new_list.append(p.pk)
                    # while we're here we may as well total up the size to
                    # avoid iterating again
                    total_size += p.depends_on.size

            # We want to return a queryset here for consistency so pick the
            # deps from the new_list
            packages_list = package_dependencies.filter(Q(pk__in=new_list))

        return {'packages': packages_list,
                'size': total_size}

    def all_depends(self):
        """ Returns just the depends packages and not any other dep_type

        Note that this is for any target
        """
        return self.filter(Q(dep_type=Package_Dependency.TYPE_RDEPENDS) |
                           Q(dep_type=Package_Dependency.TYPE_TRDEPENDS))
|
|
|
|
|
2016-05-26 15:12:27 +00:00
|
|
|
|
2013-11-26 18:12:43 +00:00
|
|
|
class Package_Dependency(models.Model):
    """A runtime relationship between two Packages (depends, recommends,
    suggests, provides, replaces or conflicts), optionally scoped to a Target."""

    TYPE_RDEPENDS = 0
    TYPE_TRDEPENDS = 1
    TYPE_RRECOMMENDS = 2
    TYPE_TRECOMMENDS = 3
    TYPE_RSUGGESTS = 4
    TYPE_RPROVIDES = 5
    TYPE_RREPLACES = 6
    TYPE_RCONFLICTS = 7
    # TODO: bpackage should be changed to remove the DEPENDS_TYPE access
    # (the line above was a stray bare-string statement; now a real comment)
    DEPENDS_TYPE = (
        (TYPE_RDEPENDS, "depends"),
        (TYPE_TRDEPENDS, "depends"),
        (TYPE_TRECOMMENDS, "recommends"),
        (TYPE_RRECOMMENDS, "recommends"),
        (TYPE_RSUGGESTS, "suggests"),
        (TYPE_RPROVIDES, "provides"),
        (TYPE_RREPLACES, "replaces"),
        (TYPE_RCONFLICTS, "conflicts"),
    )
    # Indexed by dep_type, in view order, key for short name and help
    # description which when viewed will be printf'd with the
    # package name.
    DEPENDS_DICT = {
        TYPE_RDEPENDS : ("depends", "%s is required to run %s"),
        TYPE_TRDEPENDS : ("depends", "%s is required to run %s"),
        TYPE_TRECOMMENDS : ("recommends", "%s extends the usability of %s"),
        TYPE_RRECOMMENDS : ("recommends", "%s extends the usability of %s"),
        TYPE_RSUGGESTS : ("suggests", "%s is suggested for installation with %s"),
        TYPE_RPROVIDES : ("provides", "%s is provided by %s"),
        TYPE_RREPLACES : ("replaces", "%s is replaced by %s"),
        TYPE_RCONFLICTS : ("conflicts", "%s conflicts with %s, which will not be installed if this package is not first removed"),
    }

    package = models.ForeignKey(Package, related_name='package_dependencies_source')
    depends_on = models.ForeignKey(Package, related_name='package_dependencies_target') # soft dependency
    dep_type = models.IntegerField(choices=DEPENDS_TYPE)
    # target the dependency was observed on; null for target-less contexts
    target = models.ForeignKey(Target, null=True)
    objects = Package_DependencyManager()
|
2013-10-11 12:46:23 +00:00
|
|
|
|
2013-11-26 18:12:43 +00:00
|
|
|
class Target_Installed_Package(models.Model):
    """Join table recording that a Package was installed into a Target's image."""

    target = models.ForeignKey(Target)
    package = models.ForeignKey(Package, related_name='buildtargetlist_package')
|
2013-10-11 12:46:23 +00:00
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
|
2013-11-26 18:12:43 +00:00
|
|
|
class Package_File(models.Model):
    """A single file shipped by a Package."""

    package = models.ForeignKey(Package, related_name='buildfilelist_package')
    # path of the file within the package
    path = models.FilePathField(max_length=255, blank=True)
    # size in bytes
    size = models.IntegerField()
|
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class Recipe(models.Model):
    """A bitbake recipe, tied to the Layer_Version it was parsed from."""

    # fields searchable from the Toaster UI
    search_allowed_fields = ['name', 'version', 'file_path', 'section',
                             'summary', 'description', 'license',
                             'layer_version__layer__name',
                             'layer_version__branch', 'layer_version__commit',
                             'layer_version__local_path',
                             'layer_version__layer_source']

    up_date = models.DateTimeField(null=True, default=None)

    name = models.CharField(max_length=100, blank=True)
    version = models.CharField(max_length=100, blank=True)
    layer_version = models.ForeignKey('Layer_Version',
                                      related_name='recipe_layer_version')
    summary = models.TextField(blank=True)
    description = models.TextField(blank=True)
    section = models.CharField(max_length=100, blank=True)
    license = models.CharField(max_length=200, blank=True)
    homepage = models.URLField(blank=True)
    bugtracker = models.URLField(blank=True)
    # path of the .bb file, relative to the layer
    file_path = models.FilePathField(max_length=255)
    # BBAppend-style flags recorded against the recipe path
    pathflags = models.CharField(max_length=200, blank=True)
    is_image = models.BooleanField(default=False)

    def __unicode__(self):
        return "Recipe " + self.name + ":" + self.version

    def get_vcs_recipe_file_link_url(self):
        """Return a web VCS link to this recipe's .bb file, or None."""
        return self.layer_version.get_vcs_file_link_url(self.file_path)

    def get_description_or_summary(self):
        """Return the description, falling back to the summary, then ""."""
        # both are TextFields defaulting to "", so a simple or-chain gives
        # exactly the original first-non-empty behavior
        return self.description or self.summary or ""

    class Meta:
        unique_together = (("layer_version", "file_path", "pathflags"), )
|
|
|
|
|
2014-11-05 14:47:51 +00:00
|
|
|
|
2014-03-19 12:24:47 +00:00
|
|
|
class Recipe_DependencyManager(models.Manager):
    """Manager for Recipe_Dependency rows which hides self-dependencies."""

    use_for_related_fields = True

    def get_queryset(self):
        # a recipe "depending" on itself is noise; exclude those rows
        return super(Recipe_DependencyManager, self).get_queryset().exclude(recipe_id = F('depends_on__id'))
|
2013-10-11 12:46:23 +00:00
|
|
|
|
2016-01-08 11:17:18 +00:00
|
|
|
class Provides(models.Model):
    """A virtual target name PROVIDE'd by a Recipe."""

    name = models.CharField(max_length=100)
    recipe = models.ForeignKey(Recipe)
|
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class Recipe_Dependency(models.Model):
    """A build-time (DEPENDS) or runtime (RDEPENDS) edge between two Recipes."""

    TYPE_DEPENDS = 0
    TYPE_RDEPENDS = 1

    DEPENDS_TYPE = (
        (TYPE_DEPENDS, "depends"),
        (TYPE_RDEPENDS, "rdepends"),
    )
    recipe = models.ForeignKey(Recipe, related_name='r_dependencies_recipe')
    depends_on = models.ForeignKey(Recipe, related_name='r_dependencies_depends')
    # the Provides entry the dependency was resolved through, if any
    via = models.ForeignKey(Provides, null=True, default=None)
    dep_type = models.IntegerField(choices=DEPENDS_TYPE)
    objects = Recipe_DependencyManager()
|
2013-10-11 12:46:23 +00:00
|
|
|
|
2014-06-03 15:26:17 +00:00
|
|
|
|
2014-08-08 14:03:03 +00:00
|
|
|
class Machine(models.Model):
    """A machine definition (conf/machine/*.conf) provided by a layer."""

    search_allowed_fields = ["name", "description", "layer_version__layer__name"]
    up_date = models.DateTimeField(null = True, default = None)

    layer_version = models.ForeignKey('Layer_Version')
    name = models.CharField(max_length=255)
    description = models.CharField(max_length=255)

    def get_vcs_machine_file_link_url(self):
        """Return a web VCS link to this machine's .conf file, or None."""
        # machine configuration files live at a well-known path in the layer
        machine_conf_path = 'conf/machine/' + self.name + '.conf'
        return self.layer_version.get_vcs_file_link_url(machine_conf_path)

    def __unicode__(self):
        return "Machine %s(%s)" % (self.name, self.description)
|
|
|
|
|
|
|
|
|
2014-11-14 17:07:06 +00:00
|
|
|
|
2014-08-08 14:03:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
class BitbakeVersion(models.Model):
    """A specific bitbake checkout (git URL, branch and directory) that a
    Release builds with."""

    name = models.CharField(max_length=32, unique = True)
    giturl = GitURLField()
    branch = models.CharField(max_length=32)
    # subdirectory of the checkout containing bitbake itself
    dirpath = models.CharField(max_length=255)

    def __unicode__(self):
        return "%s (Branch: %s)" % (self.name, self.branch)
|
2014-10-09 11:37:30 +00:00
|
|
|
|
2014-08-08 14:03:03 +00:00
|
|
|
|
|
|
|
class Release(models.Model):
    """ A release is a project template, used to pre-populate Project settings with a configuration set """
    name = models.CharField(max_length=32, unique = True)
    description = models.CharField(max_length=255)
    bitbake_version = models.ForeignKey(BitbakeVersion)
    # metadata branch this release tracks
    branch_name = models.CharField(max_length=50, default = "")
    helptext = models.TextField(null=True)

    def __unicode__(self):
        return "%s (%s)" % (self.name, self.branch_name)

    # NOTE(review): __str__ deliberately shows less detail than __unicode__
    # (name only vs name + branch) — confirm this asymmetry is intended
    def __str__(self):
        return self.name
|
|
|
|
|
2014-08-08 14:03:03 +00:00
|
|
|
class ReleaseDefaultLayer(models.Model):
    """A layer enabled by default for new projects created from a Release."""

    release = models.ForeignKey(Release)
    layer_name = models.CharField(max_length=100, default="")
|
2014-08-08 14:03:03 +00:00
|
|
|
|
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
class LayerSource(object):
    """ Where the layer metadata came from """
    TYPE_LOCAL = 0
    TYPE_LAYERINDEX = 1
    TYPE_IMPORTED = 2
    TYPE_BUILD = 3

    SOURCE_TYPE = (
        (TYPE_LOCAL, "local"),
        (TYPE_LAYERINDEX, "layerindex"),
        (TYPE_IMPORTED, "imported"),
        (TYPE_BUILD, "build"),
    )

    @staticmethod
    def types_dict():
        """ Turn the TYPES enums into a simple dictionary """
        # @staticmethod was missing (the unbound call LayerSource.types_dict()
        # fails on Python 2 without it), and the previous test
        # '"TYPE" in key' wrongly swept in SOURCE_TYPE as well; match the
        # enum constants precisely by their TYPE_ prefix
        dictionary = {}
        for key in LayerSource.__dict__:
            if key.startswith("TYPE_"):
                dictionary[key] = getattr(LayerSource, key)
        return dictionary
|
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class Layer(models.Model):
    """A metadata layer, identified by name and (optionally) its git URL."""

    up_date = models.DateTimeField(null=True, default=timezone.now)

    name = models.CharField(max_length=100)
    # URL of the layer's page on the layer index, if it came from there
    layer_index_url = models.URLField()
    vcs_url = GitURLField(default=None, null=True)
    # for layers not under version control: directory on the local filesystem
    local_source_dir = models.TextField(null = True, default = None)
    # web front-end URLs for browsing the repository, its tree and its files
    vcs_web_url = models.URLField(null=True, default=None)
    vcs_web_tree_base_url = models.URLField(null=True, default=None)
    vcs_web_file_base_url = models.URLField(null=True, default=None)

    summary = models.TextField(help_text='One-line description of the layer',
                               null=True, default=None)
    description = models.TextField(null=True, default=None)

    def __unicode__(self):
        return "%s / %s " % (self.name, self.summary)
|
2013-10-11 12:46:23 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Layer_Version(models.Model):
|
2016-04-19 16:28:46 +00:00
|
|
|
"""
|
|
|
|
A Layer_Version either belongs to a single project or no project
|
|
|
|
"""
|
2016-07-21 13:43:25 +00:00
|
|
|
search_allowed_fields = ["layer__name", "layer__summary",
|
|
|
|
"layer__description", "layer__vcs_url",
|
2016-07-21 13:43:30 +00:00
|
|
|
"dirpath", "release__name", "commit", "branch"]
|
2016-07-21 13:43:25 +00:00
|
|
|
|
|
|
|
build = models.ForeignKey(Build, related_name='layer_version_build',
|
|
|
|
default=None, null=True)
|
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
layer = models.ForeignKey(Layer, related_name='layer_version_layer')
|
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
layer_source = models.IntegerField(choices=LayerSource.SOURCE_TYPE,
|
|
|
|
default=0)
|
|
|
|
|
|
|
|
up_date = models.DateTimeField(null=True, default=timezone.now)
|
2014-08-08 14:03:03 +00:00
|
|
|
|
2016-07-21 13:43:30 +00:00
|
|
|
# To which metadata release does this layer version belong to
|
|
|
|
release = models.ForeignKey(Release, null=True, default=None)
|
2014-08-08 14:03:03 +00:00
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
branch = models.CharField(max_length=80)
|
|
|
|
commit = models.CharField(max_length=100)
|
|
|
|
# If the layer is in a subdir
|
|
|
|
dirpath = models.CharField(max_length=255, null=True, default=None)
|
2015-05-14 15:10:50 +00:00
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
# if -1, this is a default layer
|
|
|
|
priority = models.IntegerField(default=0)
|
2014-11-24 12:52:49 +00:00
|
|
|
|
2016-07-21 13:43:25 +00:00
|
|
|
# where this layer exists on the filesystem
|
|
|
|
local_path = models.FilePathField(max_length=1024, default="/")
|
|
|
|
|
|
|
|
# Set if this layer is restricted to a particular project
|
|
|
|
project = models.ForeignKey('Project', null=True, default=None)
|
|
|
|
|
|
|
|
# code lifted, with adaptations, from the layerindex-web application
|
|
|
|
# https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/
|
2014-10-20 15:26:14 +00:00
|
|
|
def _handle_url_path(self, base_url, path):
    """Build a web URL for "path" within this layer version.

    Substitutes %branch% and %path% placeholders in base_url, prefixing
    path with the layer's subdirectory (dirpath) when one is set.
    Returns None when base_url is empty/None.
    """
    import re, posixpath
    if base_url:
        if self.dirpath:
            if path:
                extra_path = self.dirpath + '/' + path
                # Normalise out ../ in path for usage URL
                extra_path = posixpath.normpath(extra_path)
                # Minor workaround to handle case where subdirectory has been added between branches
                # (should probably support usage URL per branch to handle this... sigh...)
                if extra_path.startswith('../'):
                    extra_path = extra_path[3:]
            else:
                extra_path = self.dirpath
        else:
            extra_path = path
        # NOTE(review): assumes self.release is set; a release-less layer
        # version would raise AttributeError here — confirm callers guarantee it
        branchname = self.release.name
        url = base_url.replace('%branch%', branchname)

        # If there's a % in the path (e.g. a wildcard bbappend) we need to encode it
        if extra_path:
            extra_path = extra_path.replace('%', '%25')

        if '%path%' in base_url:
            # "[...]" sections in the URL template are kept only when a
            # path is present, otherwise dropped entirely
            if extra_path:
                url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '\\1', url)
            else:
                url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '', url)
            # NOTE(review): if extra_path is None here (no dirpath, path=None)
            # str.replace raises TypeError — callers pass '' or a string today
            return url.replace('%path%', extra_path)
        else:
            return url + extra_path
    return None
|
|
|
|
|
|
|
|
def get_vcs_link_url(self):
    """Return the layer's repository web URL, or None if it has none."""
    web_url = self.layer.vcs_web_url
    return None if web_url is None else web_url
|
|
|
|
|
|
|
|
def get_vcs_file_link_url(self, file_path=""):
    """Return a web link to `file_path` inside this layer, or None
    when the layer has no file-browsing base URL."""
    base = self.layer.vcs_web_file_base_url
    if base is None:
        return None
    return self._handle_url_path(base, file_path)
|
2014-09-29 19:20:33 +00:00
|
|
|
|
2014-10-20 15:26:14 +00:00
|
|
|
def get_vcs_dirpath_link_url(self):
    """Return a web link to the layer's directory tree, or None when
    the layer has no tree-browsing base URL."""
    tree_base = self.layer.vcs_web_tree_base_url
    if tree_base is None:
        return None
    return self._handle_url_path(tree_base, '')
|
2014-09-29 19:20:33 +00:00
|
|
|
|
2015-01-08 13:15:13 +00:00
|
|
|
def get_vcs_reference(self):
    """Return the preferred VCS reference for this layer version.

    Precedence: explicit branch, then the release name, then a raw
    commit id; 'N/A' when none of these is set.
    """
    if self.branch:
        return self.branch
    release = self.release
    if release is not None:
        return release.name
    if self.commit:
        return self.commit
    return 'N/A'
|
2014-09-29 19:20:33 +00:00
|
|
|
|
2015-08-04 19:46:29 +00:00
|
|
|
def get_detailspage_url(self, project_id):
    """Return the URL of this layer version's details page within
    the given project."""
    route_args = (project_id, self.pk)
    return reverse('layerdetails', args=route_args)
|
|
|
|
|
2015-10-02 18:05:12 +00:00
|
|
|
def get_alldeps(self, project_id):
    """Get full list of unique layer dependencies."""
    # Recursively walk the LayerVersionDependency graph, yielding every
    # transitive depends_on layer version (duplicates included).
    # NOTE(review): no cycle guard — a circular layer dependency would
    # recurse forever; assumes the dependency graph is acyclic.
    def gen_layerdeps(lver, project):
        for ldep in lver.dependencies.all():
            yield ldep.depends_on
            # get next level of deps recursively calling gen_layerdeps
            for subdep in gen_layerdeps(ldep.depends_on, project):
                yield subdep

    project = Project.objects.get(pk=project_id)
    result = []
    # Layer versions already configured on the project.
    projectlvers = [player.layercommit for player in project.projectlayer_set.all()]
    for dep in gen_layerdeps(self, project):
        # filter out duplicates and layers already belonging to the project
        if dep not in result + projectlvers:
            result.append(dep)

    # Stable, user-friendly ordering by layer name.
    return sorted(result, key=lambda x: x.layer.name)
|
|
|
|
|
2014-08-08 14:03:03 +00:00
|
|
|
def __unicode__(self):
    """Debug-friendly representation (Python 2 unicode protocol)."""
    layer_name = self.layer.name
    return "id %d belongs to layer: %s" % (self.pk, layer_name)
|
2014-08-08 14:03:03 +00:00
|
|
|
|
2016-07-21 13:43:31 +00:00
|
|
|
def __str__(self):
    """Human-readable summary: "<pk> <layer name> (<release>)"."""
    release = self.release.name if self.release else "No release set"
    return "%d %s (%s)" % (self.pk, self.layer.name, release)
|
|
|
|
|
2014-08-08 14:03:03 +00:00
|
|
|
|
|
|
|
class LayerVersionDependency(models.Model):
    # One edge in the layer-version dependency graph:
    # `layer_version` depends on `depends_on`.
    layer_version = models.ForeignKey(Layer_Version,
                                      related_name="dependencies")
    depends_on = models.ForeignKey(Layer_Version,
                                   related_name="dependees")
|
2014-08-08 14:03:03 +00:00
|
|
|
|
|
|
|
class ProjectLayer(models.Model):
    # A layer (at a specific version) added to a project's
    # configuration; each (project, layercommit) pair is unique.
    project = models.ForeignKey(Project)
    layercommit = models.ForeignKey(Layer_Version, null=True)
    # Whether the layer can be removed from the project.
    optional = models.BooleanField(default = True)

    def __unicode__(self):
        """Debug representation: "<project name>, <layer version>"."""
        return "%s, %s" % (self.project.name, self.layercommit)

    class Meta:
        unique_together = (("project", "layercommit"),)
|
|
|
|
|
2015-11-04 14:54:41 +00:00
|
|
|
class CustomImageRecipe(Recipe):
    """An image recipe created in Toaster by customising an existing
    image recipe (`base_recipe`) inside a project: packages can be
    appended to / excluded from the base image, and a .bb file can be
    generated from the result."""

    # CustomImageRecipe's belong to layers called:
    LAYER_NAME = "toaster-custom-images"

    search_allowed_fields = ['name']

    # The image recipe this custom image is based on.
    base_recipe = models.ForeignKey(Recipe, related_name='based_on_recipe')
    project = models.ForeignKey(Project)
    # Completion time of the last build folded into includes_set;
    # None until update_package_list() first succeeds.
    last_updated = models.DateTimeField(null=True, default=None)

    def get_last_successful_built_target(self):
        """ Return the last successful built target object if one exists
        otherwise return None """
        return Target.objects.filter(Q(build__outcome=Build.SUCCEEDED) &
                                     Q(build__project=self.project) &
                                     Q(target=self.name)).last()

    def update_package_list(self):
        """ Update the package list from the last good build of this
        CustomImageRecipe
        """
        # Check if we're aldready up-to-date or not
        target = self.get_last_successful_built_target()
        # Fix: use identity comparison with None (was `== None`).
        if target is None:
            # So we've never actually built this Custom recipe but what about
            # the recipe it's based on?
            target = \
                Target.objects.filter(Q(build__outcome=Build.SUCCEEDED) &
                                      Q(build__project=self.project) &
                                      Q(target=self.base_recipe.name)).last()
            if target is None:
                return

        # Already synced with the latest successful build: nothing to do.
        if target.build.completed_on == self.last_updated:
            return

        self.includes_set.clear()

        excludes_list = self.excludes_set.values_list('name', flat=True)
        appends_list = self.appends_set.values_list('name', flat=True)

        built_packages_list = \
            target.target_installed_package_set.values_list('package__name',
                                                            flat=True)
        for built_package in built_packages_list:
            # Is the built package in the custom packages list?
            if built_package in excludes_list:
                continue

            if built_package in appends_list:
                continue

            cust_img_p = \
                CustomImagePackage.objects.get(name=built_package)
            self.includes_set.add(cust_img_p)

        self.last_updated = target.build.completed_on
        self.save()

    def get_all_packages(self):
        """Get the included packages and any appended packages"""
        self.update_package_list()

        return CustomImagePackage.objects.filter((Q(recipe_appends=self) |
                                                  Q(recipe_includes=self)) &
                                                 ~Q(recipe_excludes=self))

    def get_base_recipe_file(self):
        """Get the base recipe file path if it exists on the file system"""
        # Schema one: layer directory + recipe-relative path.
        path_schema_one = "%s/%s" % (self.base_recipe.layer_version.dirpath,
                                     self.base_recipe.file_path)

        path_schema_two = self.base_recipe.file_path

        if os.path.exists(path_schema_one):
            return path_schema_one

        # The path may now be the full path if the recipe has been built
        if os.path.exists(path_schema_two):
            return path_schema_two

        return None

    def generate_recipe_file_contents(self):
        """Generate the contents for the recipe file.

        Raises IOError when the base recipe file cannot be found on
        disk.
        """
        # If we have no excluded packages we only need to _append
        if self.excludes_set.count() == 0:
            packages_conf = "IMAGE_INSTALL_append = \" "

            for pkg in self.appends_set.all():
                packages_conf += pkg.name+' '
        else:
            packages_conf = "IMAGE_FEATURES =\"\"\nIMAGE_INSTALL = \""
            # We add all the known packages to be built by this recipe apart
            # from locale packages which are are controlled with IMAGE_LINGUAS.
            for pkg in self.get_all_packages().exclude(
                    name__icontains="locale"):
                packages_conf += pkg.name+' '

        packages_conf += "\""

        base_recipe_path = self.get_base_recipe_file()
        if base_recipe_path:
            # Fix: close the file handle promptly (was an unclosed
            # open(...).read()).
            with open(base_recipe_path, 'r') as base_recipe_file:
                base_recipe = base_recipe_file.read()
        else:
            raise IOError("Based on recipe file not found")

        # Add a special case for when the recipe we have based a custom image
        # recipe on requires another recipe.
        # For example:
        # "require core-image-minimal.bb" is changed to:
        # "require recipes-core/images/core-image-minimal.bb"
        req_search = re.search(r'(require\s+)(.+\.bb\s*$)',
                               base_recipe,
                               re.MULTILINE)
        if req_search:
            require_filename = req_search.group(2).strip()

            # Resolve the bare filename to its layer-relative path.
            corrected_location = Recipe.objects.filter(
                Q(layer_version=self.base_recipe.layer_version) &
                Q(file_path__icontains=require_filename)).last().file_path

            new_require_line = "require %s" % corrected_location

            base_recipe = base_recipe.replace(req_search.group(0),
                                              new_require_line)

        info = {
            "date": timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
            "base_recipe": base_recipe,
            "recipe_name": self.name,
            "base_recipe_name": self.base_recipe.name,
            "license": self.license,
            "summary": self.summary,
            "description": self.description,
            "packages_conf": packages_conf.strip()
        }

        recipe_contents = ("# Original recipe %(base_recipe_name)s \n"
                           "%(base_recipe)s\n\n"
                           "# Recipe %(recipe_name)s \n"
                           "# Customisation Generated by Toaster on %(date)s\n"
                           "SUMMARY = \"%(summary)s\"\n"
                           "DESCRIPTION = \"%(description)s\"\n"
                           "LICENSE = \"%(license)s\"\n"
                           "%(packages_conf)s") % info

        return recipe_contents
|
|
|
|
|
2014-06-03 15:26:17 +00:00
|
|
|
class ProjectVariable(models.Model):
    # A project-level bitbake configuration variable (name/value pair).
    project = models.ForeignKey(Project)
    name = models.CharField(max_length=100)
    value = models.TextField(blank = True)
|
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class Variable(models.Model):
    # A bitbake variable captured from a specific build.
    # Fields the search machinery may match against.
    search_allowed_fields = ['variable_name', 'variable_value',
                             'vhistory__file_name', "description"]
    build = models.ForeignKey(Build, related_name='variable_build')
    variable_name = models.CharField(max_length=100)
    variable_value = models.TextField(blank=True)
    # True when the value was changed from its default during the build.
    changed = models.BooleanField(default=False)
    human_readable_name = models.CharField(max_length=200)
    description = models.TextField(blank=True)
|
|
|
|
|
2013-11-14 13:56:30 +00:00
|
|
|
class VariableHistory(models.Model):
    # One recorded modification of a Variable: which file/line changed
    # it, to what value, and by which operation (set/append/...).
    variable = models.ForeignKey(Variable, related_name='vhistory')
    value = models.TextField(blank=True)
    file_name = models.FilePathField(max_length=255)
    line_number = models.IntegerField(null=True)
    operation = models.CharField(max_length=64)
|
2013-10-11 12:46:23 +00:00
|
|
|
|
2014-04-03 10:16:23 +00:00
|
|
|
class HelpText(models.Model):
    # Contextual help text captured per build, keyed by (area, key);
    # currently the only area is variable documentation.
    VARIABLE = 0
    HELPTEXT_AREA = ((VARIABLE, 'variable'), )

    build = models.ForeignKey(Build, related_name='helptext_build')
    area = models.IntegerField(choices=HELPTEXT_AREA)
    key = models.CharField(max_length=100)
    text = models.TextField()
|
|
|
|
|
2013-10-11 12:46:23 +00:00
|
|
|
class LogMessage(models.Model):
    # Severity levels; INFO..CRITICAL mirror Python logging, plus a
    # Toaster-internal EXCEPTION level.
    EXCEPTION = -1 # used to signal self-toaster-exceptions
    INFO = 0
    WARNING = 1
    ERROR = 2
    CRITICAL = 3

    LOG_LEVEL = (
        (INFO, "info"),
        (WARNING, "warn"),
        (ERROR, "error"),
        (CRITICAL, "critical"),
        (EXCEPTION, "toaster exception")
    )

    build = models.ForeignKey(Build)
    # Task that emitted the message, when attributable to one.
    task = models.ForeignKey(Task, blank = True, null=True)
    level = models.IntegerField(choices=LOG_LEVEL, default=INFO)
    message = models.TextField(blank=True, null=True)
    # Source location of the message, when known.
    pathname = models.FilePathField(max_length=255, blank=True)
    lineno = models.IntegerField(null=True)

    def __str__(self):
        # NOTE(review): force_bytes returns bytes; on Python 3 __str__
        # must return str, so this raises TypeError there — looks
        # written for Python 2 semantics, confirm before porting.
        return force_bytes('%s %s %s' % (self.get_level_display(), self.message, self.build))
|
2015-06-17 11:27:48 +00:00
|
|
|
|
2015-05-19 15:10:19 +00:00
|
|
|
def invalidate_cache(**kwargs):
    """Signal receiver: flush the whole Django cache on model changes.

    Best-effort: a misbehaving cache backend is logged as a warning,
    never raised to the caller.
    """
    from django.core.cache import cache
    try:
        cache.clear()
    except Exception as err:
        logger.warning(
            "Problem with cache backend: Failed to clear cache: %s" % err)
|
2015-05-19 15:10:19 +00:00
|
|
|
|
2016-09-16 16:22:29 +00:00
|
|
|
def signal_runbuilds():
    """Send SIGUSR1 to runbuilds process"""
    # The runbuilds daemon writes its pid to $BUILDDIR/.runbuilds.pid;
    # poking it makes it re-scan for scheduled builds.
    # NOTE(review): assumes the BUILDDIR env var is set (os.path.join
    # raises TypeError otherwise) and that SIGUSR1 is imported elsewhere
    # in this module — confirm.
    with open(os.path.join(os.getenv('BUILDDIR'), '.runbuilds.pid')) as pidf:
        os.kill(int(pidf.read()), SIGUSR1)
|
|
|
|
|
2015-05-19 15:10:19 +00:00
|
|
|
# Keep cached pages coherent: flush the cache whenever any model
# instance is saved or deleted, or a many-to-many relation changes.
django.db.models.signals.post_save.connect(invalidate_cache)
django.db.models.signals.post_delete.connect(invalidate_cache)
django.db.models.signals.m2m_changed.connect(invalidate_cache)
|