# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# This bbclass is used for creating archive for:
# 1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
# 2) patched source: ARCHIVER_MODE[src] = "patched" (default)
# 3) configured source: ARCHIVER_MODE[src] = "configured"
# 4) The patches between do_unpack and do_patch:
#    ARCHIVER_MODE[diff] = "1"
#    And you can set the one that you'd like to exclude from the diff:
#    ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
# 5) The environment data, similar to 'bitbake -e recipe':
#    ARCHIVER_MODE[dumpdata] = "1"
# 6) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
# 7) Whether output the .src.rpm package:
#    ARCHIVER_MODE[srpm] = "1"
# 8) Filter the license, the recipe whose license in
#    COPYLEFT_LICENSE_INCLUDE will be included, and in
#    COPYLEFT_LICENSE_EXCLUDE will be excluded.
#    COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#    COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 9) The recipe type that will be archived:
#    COPYLEFT_RECIPE_TYPES = 'target'
#

# Don't filter the license by default
COPYLEFT_LICENSE_INCLUDE ?= ''
COPYLEFT_LICENSE_EXCLUDE ?= ''
# Create archive for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/deploy-sources"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${TARGET_SYS}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
do_deploy_archives[dirs] = "${WORKDIR}"
do_deploy_all_archives[dirs] = "${WORKDIR}"

# This is a convenience for the shell script to use it
python () {
2016-12-14 21:13:04 +00:00
pn = d.getVar('PN')
assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
2016-04-09 22:16:47 +00:00
if pn in assume_provided:
2016-12-14 21:13:04 +00:00
for p in d.getVar("PROVIDES").split():
2016-04-09 22:16:47 +00:00
if p != pn:
pn = p
break
2014-01-04 13:02:30 +00:00
2015-06-09 12:46:48 +00:00
included, reason = copyleft_should_include(d)
if not included:
bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
return
else:
bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
2014-03-24 05:39:26 +00:00
2016-03-24 07:20:28 +00:00
# We just archive gcc-source for all the gcc related recipes
2016-12-14 21:13:04 +00:00
if d.getVar('BPN') in ['gcc', 'libgcc'] \
2016-03-24 07:20:28 +00:00
and not pn.startswith('gcc-source'):
bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
return
2016-12-14 21:13:06 +00:00
ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')
2014-01-05 14:58:09 +00:00
if ar_src == "original":
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
elif ar_src == "patched":
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
elif ar_src == "configured":
# We can't use "addtask do_ar_configured after do_configure" since it
# will cause the deptask of do_populate_sysroot to run not matter what
# archives we need, so we add the depends here.
2016-03-22 14:04:02 +00:00
# There is a corner case with "gcc-source-${PV}" recipes, they don't have
# the "do_configure" task, so we need to use "do_preconfigure"
if pn.startswith("gcc-source-"):
d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
else:
d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
2014-01-05 14:58:09 +00:00
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
2016-03-22 14:04:02 +00:00
2014-01-05 14:58:09 +00:00
elif ar_src:
bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)
if ar_dumpdata == "1":
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)
if ar_recipe == "1":
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)
# Output the srpm package
2016-12-14 21:13:06 +00:00
ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm')
2014-01-05 14:58:09 +00:00
if ar_srpm == "1":
2016-12-14 21:13:04 +00:00
if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
2014-01-05 14:58:09 +00:00
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
if ar_dumpdata == "1":
d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
if ar_recipe == "1":
d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
if ar_src == "original":
d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
elif ar_src == "patched":
d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
elif ar_src == "configured":
d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}
2014-01-04 13:02:30 +00:00
2014-01-05 14:58:09 +00:00
# Take all the sources for a recipe and puts them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {
2014-01-04 13:02:30 +00:00
2016-12-08 21:29:57 +00:00
import shutil, tempfile
2014-01-04 13:02:30 +00:00
2016-12-14 21:13:06 +00:00
if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
2014-01-05 14:58:09 +00:00
return
2014-01-04 13:02:30 +00:00
2016-12-14 21:13:04 +00:00
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
2014-01-05 14:58:09 +00:00
bb.note('Archiving the original source...')
2016-12-14 21:13:04 +00:00
urls = d.getVar("SRC_URI").split()
2016-09-26 09:55:15 +00:00
# destsuffix (git fetcher) and subdir (everything else) are allowed to be
# absolute paths (for example, destsuffix=${S}/foobar).
# That messes with unpacking inside our tmpdir below, because the fetchers
# will then unpack in that directory and completely ignore the tmpdir.
# That breaks parallel tasks relying on ${S}, like do_compile.
#
# To solve this, we remove these parameters from all URLs.
# We do this even for relative paths because it makes the content of the
# archives more useful (no extra paths that are only used during
# compilation).
for i, url in enumerate(urls):
decoded = bb.fetch2.decodeurl(url)
for param in ('destsuffix', 'subdir'):
if param in decoded[5]:
del decoded[5][param]
encoded = bb.fetch2.encodeurl(decoded)
urls[i] = encoded
fetch = bb.fetch2.Fetch(urls, d)
2016-09-26 09:55:16 +00:00
tarball_suffix = {}
2014-01-05 14:58:09 +00:00
for url in fetch.urls:
2014-08-20 08:15:00 +00:00
local = fetch.localpath(url).rstrip("/");
2014-01-05 14:58:09 +00:00
if os.path.isfile(local):
shutil.copy(local, ar_outdir)
elif os.path.isdir(local):
2016-12-14 21:13:04 +00:00
tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
2014-01-05 14:58:09 +00:00
fetch.unpack(tmpdir, (url,))
2016-09-26 09:55:16 +00:00
# To handle recipes with more than one source, we add the "name"
# URL parameter as suffix. We treat it as an error when
# there's more than one URL without a name, or a name gets reused.
# This is an additional safety net, in practice the name has
# to be set when using the git fetcher, otherwise SRCREV cannot
# be set separately for each URL.
params = bb.fetch2.decodeurl(url)[5]
name = params.get('name', '')
if name in tarball_suffix:
if not name:
bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
else:
bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (tarball_suffix[name], url))
tarball_suffix[name] = url
create_tarball(d, tmpdir + '/.', name, ar_outdir)
2014-03-24 05:39:27 +00:00
# Emit patch series files for 'original'
bb.note('Writing patch series files...')
for patch in src_patches(d):
_, _, local, _, _, parm = bb.fetch.decodeurl(patch)
patchdir = parm.get('patchdir')
if patchdir:
series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
else:
series = os.path.join(ar_outdir, 'series')
with open(series, 'a') as s:
s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
2014-01-04 13:02:30 +00:00
}
2014-01-05 14:58:09 +00:00
python do_ar_patched() {
2012-06-26 05:59:41 +00:00
2016-12-14 21:13:06 +00:00
if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
2014-01-05 14:58:09 +00:00
return
2012-06-26 05:59:41 +00:00
2014-01-05 14:58:09 +00:00
# Get the ARCHIVER_OUTDIR before we reset the WORKDIR
2016-12-14 21:13:04 +00:00
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
ar_workdir = d.getVar('ARCHIVER_WORKDIR')
2014-01-05 14:58:09 +00:00
bb.note('Archiving the patched source...')
2016-03-22 14:04:01 +00:00
d.setVar('WORKDIR', ar_workdir)
2016-12-14 21:13:04 +00:00
create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
2014-01-05 14:58:09 +00:00
}
2012-08-23 15:03:10 +00:00
2014-01-05 14:58:09 +00:00
python do_ar_configured() {
2012-08-23 15:03:10 +00:00
import shutil
2016-12-14 21:13:04 +00:00
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
2016-12-14 21:13:06 +00:00
if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
2014-01-05 14:58:09 +00:00
bb.note('Archiving the configured source...')
2016-12-14 21:13:04 +00:00
pn = d.getVar('PN')
2016-03-22 14:04:02 +00:00
# "gcc-source-${PV}" recipes don't have "do_configure"
# task, so we need to run "do_preconfigure" instead
if pn.startswith("gcc-source-"):
2016-12-14 21:13:04 +00:00
d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
2016-03-22 14:04:02 +00:00
bb.build.exec_func('do_preconfigure', d)
2014-01-05 14:58:09 +00:00
# The libtool-native's do_configure will remove the
# ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run the
# do_configure, we archive the already configured ${S} to
# instead of.
2016-03-22 14:04:02 +00:00
elif pn != 'libtool-native':
2014-01-05 14:58:09 +00:00
# Change the WORKDIR to make do_configure run in another dir.
2016-12-14 21:13:04 +00:00
d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
2014-01-05 14:58:09 +00:00
if bb.data.inherits_class('kernel-yocto', d):
bb.build.exec_func('do_kernel_configme', d)
if bb.data.inherits_class('cmake', d):
bb.build.exec_func('do_generate_toolchain_file', d)
2016-12-14 21:13:06 +00:00
prefuncs = d.getVarFlag('do_configure', 'prefuncs')
2014-01-05 14:58:09 +00:00
for func in (prefuncs or '').split():
if func != "sysroot_cleansstate":
bb.build.exec_func(func, d)
bb.build.exec_func('do_configure', d)
2016-12-14 21:13:06 +00:00
postfuncs = d.getVarFlag('do_configure', 'postfuncs')
2014-01-05 14:58:09 +00:00
for func in (postfuncs or '').split():
if func != "do_qa_configure":
bb.build.exec_func(func, d)
2016-12-14 21:13:04 +00:00
srcdir = d.getVar('S')
builddir = d.getVar('B')
2014-01-05 14:58:09 +00:00
if srcdir != builddir:
if os.path.exists(builddir):
oe.path.copytree(builddir, os.path.join(srcdir, \
2016-12-14 21:13:04 +00:00
'build.%s.ar_configured' % d.getVar('PF')))
2014-01-05 14:58:09 +00:00
create_tarball(d, srcdir, 'configured', ar_outdir)
}
2012-03-26 10:49:26 +00:00
2015-09-23 21:36:07 +00:00
def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    Create a gzipped tarball of srcdir in ar_outdir.

    The archive is named ${PF}-<suffix>.tar.gz (or ${PF}.tar.gz when
    suffix is empty). Recipes with an empty SRC_URI (e.g. the gcc
    packages sharing a single source tree) are skipped so only one
    tarball is produced for them.
    """
    import tarfile

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
    else:
        filename = '%s.tar.gz' % d.getVar('PF')
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    # Context manager guarantees the archive is closed even if tar.add fails.
    with tarfile.open(tarname, 'w:gz') as tar:
        tar.add(srcdir, arcname=os.path.basename(srcdir))
# creating .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):
    """Write ${PF}-diff.gz (diff of src_orig vs src) into ar_outdir.

    Paths listed in ARCHIVER_MODE[diff-exclude] are removed from both
    trees before diffing. The temporary hardlinked copy of src is
    cleaned up afterwards.
    """
    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # The diff --exclude can't exclude the file with path, so we copy
    # the patched source, and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    os.chdir(dirname)
    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
    diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
    subprocess.call(diff_cmd, shell=True)
    bb.utils.remove(src_patched, recurse=True)
# Run do_unpack and do_patch
python do_unpack_and_patch() {
2016-12-14 21:13:06 +00:00
if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
2014-01-05 14:58:09 +00:00
[ 'patched', 'configured'] and \
2016-12-14 21:13:06 +00:00
d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
2014-01-05 14:58:09 +00:00
return
2016-12-14 21:13:04 +00:00
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
ar_workdir = d.getVar('ARCHIVER_WORKDIR')
pn = d.getVar('PN')
2014-06-16 03:22:57 +00:00
2015-11-09 22:40:50 +00:00
# The kernel class functions require it to be on work-shared, so we dont change WORKDIR
2016-10-10 18:32:05 +00:00
if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
2016-03-22 14:04:01 +00:00
# Change the WORKDIR to make do_unpack do_patch run in another dir.
d.setVar('WORKDIR', ar_workdir)
2014-01-05 14:58:09 +00:00
2016-03-22 14:04:01 +00:00
# The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
# possibly requiring of the following tasks (such as some recipes's
# do_patch required 'B' existed).
2016-12-14 21:13:04 +00:00
bb.utils.mkdirhier(d.getVar('B'))
2016-03-22 14:04:01 +00:00
bb.build.exec_func('do_unpack', d)
2015-11-09 22:40:50 +00:00
2014-01-05 14:58:09 +00:00
# Save the original source for creating the patches
2016-12-14 21:13:06 +00:00
if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
2016-12-14 21:13:04 +00:00
src = d.getVar('S').rstrip('/')
2014-01-05 14:58:09 +00:00
src_orig = '%s.orig' % src
oe.path.copytree(src, src_orig)
archiver.bbclass: Fixes and improves archiver class for kernel and gcc packages
gcc packages use a shared source directory, this causes an issue since the archiver will
try to patch the same source several times (one for each gcc package), producing an error,
the archiver class used stamp-base to check this, nonetheless our gcc packages no longer
use stamp-base, they use gcc-shared instead, which is what broke this functionality.
This patch adds a check to see whether or not the source should be patched,
avoiding patching the source when it shouldn't.
Also, we dont need to create multiple identical tarballs for all gcc packages,
this patch fixes this and creates a single source tarball for gcc.
When requesting patched sources, a race condition is created for linux-yocto tasks,
unpack_and_patch is executed along with kernel_configme, which most of the time
causes errors during configure, since kernel_configme task is specific to the kernel,
simply modifying the tasks order by creating a dependency to kernel_configme was impossible,
causing errors on all other packages that didnt use kernel_configme, this is fixed by
creating a special case for the kernel, adding tasks with correct dependencies,
avoiding the race condition and behaving the way it should for all other packages as well.
[YOCTO #8378]
(From OE-Core rev: aecaa0e8739db1c228a6db78225a717d9f348a5b)
Signed-off-by: Alejandro Hernandez <alejandro.hernandez@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2015-10-06 23:05:09 +00:00
2015-11-09 22:40:50 +00:00
# Make sure gcc and kernel sources are patched only once
2016-12-14 21:13:04 +00:00
if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
archiver.bbclass: Fixes and improves archiver class for kernel and gcc packages
gcc packages use a shared source directory, this causes an issue since the archiver will
try to patch the same source several times (one for each gcc package), producing an error,
the archiver class used stamp-base to check this, nonetheless our gcc packages no longer
use stamp-base, they use gcc-shared instead, which is what broke this functionality.
This patch adds a check to see whether or not the source should be patched,
avoiding patching the source when it shouldn't.
Also, we dont need to create multiple identical tarballs for all gcc packages,
this patch fixes this and creates a single source tarball for gcc.
When requesting patched sources, a race condition is created for linux-yocto tasks,
unpack_and_patch is executed along with kernel_configme, which most of the time
causes errors during configure, since kernel_configme task is specific to the kernel,
simply modifying the tasks order by creating a dependency to kernel_configme was impossible,
causing errors on all other packages that didnt use kernel_configme, this is fixed by
creating a special case for the kernel, adding tasks with correct dependencies,
avoiding the race condition and behaving the way it should for all other packages as well.
[YOCTO #8378]
(From OE-Core rev: aecaa0e8739db1c228a6db78225a717d9f348a5b)
Signed-off-by: Alejandro Hernandez <alejandro.hernandez@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2015-10-06 23:05:09 +00:00
bb.build.exec_func('do_patch', d)
2014-01-05 14:58:09 +00:00
# Create the patches
2016-12-14 21:13:06 +00:00
if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
2014-01-05 14:58:09 +00:00
bb.note('Creating diff gz...')
create_diff_gz(d, src_orig, src, ar_outdir)
bb.utils.remove(src_orig, recurse=True)
}
2012-08-23 15:03:10 +00:00
2014-01-05 14:58:09 +00:00
python do_ar_recipe () {
2012-08-23 15:08:22 +00:00
"""
2014-01-05 14:58:09 +00:00
archive the recipe, including .bb and .inc.
2012-08-23 15:08:22 +00:00
"""
2014-01-05 14:58:09 +00:00
import re
2013-02-20 13:50:22 +00:00
import shutil
2012-08-23 15:03:10 +00:00
2014-01-05 14:58:09 +00:00
require_re = re.compile( r"require\s+(.+)" )
include_re = re.compile( r"include\s+(.+)" )
2016-12-14 21:13:04 +00:00
bbfile = d.getVar('FILE')
outdir = os.path.join(d.getVar('WORKDIR'), \
'%s-recipe' % d.getVar('PF'))
2014-01-05 14:58:09 +00:00
bb.utils.mkdirhier(outdir)
shutil.copy(bbfile, outdir)
2016-12-14 21:13:04 +00:00
pn = d.getVar('PN')
bbappend_files = d.getVar('BBINCLUDED').split()
2015-11-13 08:21:46 +00:00
# If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
# Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn)
bbappend_re1 = re.compile( r".*/%s\.bbappend$" %pn)
for file in bbappend_files:
if bbappend_re.match(file) or bbappend_re1.match(file):
shutil.copy(file, outdir)
2014-01-05 14:58:09 +00:00
dirname = os.path.dirname(bbfile)
2016-12-14 21:13:04 +00:00
bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
2014-01-05 14:58:09 +00:00
f = open(bbfile, 'r')
for line in f.readlines():
incfile = None
if require_re.match(line):
incfile = require_re.match(line).group(1)
elif include_re.match(line):
incfile = include_re.match(line).group(1)
if incfile:
2017-03-17 15:53:09 +00:00
incfile = d.expand(incfile)
2014-01-05 14:58:09 +00:00
incfile = bb.utils.which(bbpath, incfile)
if incfile:
shutil.copy(incfile, outdir)
2016-12-14 21:13:04 +00:00
create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
2014-01-05 14:58:09 +00:00
bb.utils.remove(outdir, recurse=True)
}
2012-03-26 10:49:26 +00:00
2014-01-05 14:58:09 +00:00
python do_dumpdata () {
2012-08-23 15:08:22 +00:00
"""
2014-01-05 14:58:09 +00:00
dump environment data to ${PF}-showdata.dump
2012-08-23 15:08:22 +00:00
"""
2012-08-23 15:03:10 +00:00
2016-12-14 21:13:04 +00:00
dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
'%s-showdata.dump' % d.getVar('PF'))
2014-01-05 14:58:09 +00:00
bb.note('Dumping metadata into %s' % dumpfile)
2016-03-22 14:04:00 +00:00
with open(dumpfile, "w") as f:
# emit variables and shell functions
bb.data.emit_env(f, d, True)
# emit the metadata which isn't valid shell
for e in d.keys():
if d.getVarFlag(e, "python", False):
f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
2012-03-26 10:49:26 +00:00
}
2014-01-04 13:02:30 +00:00
2014-01-05 14:58:09 +00:00
SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    echo "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives before do_build

addtask do_deploy_all_archives after do_deploy_archives
do_deploy_all_archives[recrdeptask] = "do_deploy_archives"
do_deploy_all_archives[recideptask] = "do_${BB_DEFAULT_TASK}"
do_deploy_all_archives() {
        :
}
archiver.bbclass: Fixes and improves archiver class for kernel and gcc packages
gcc packages use a shared source directory, this causes an issue since the archiver will
try to patch the same source several times (one for each gcc package), producing an error,
the archiver class used stamp-base to check this, nonetheless our gcc packages no longer
use stamp-base, they use gcc-shared instead, which is what broke this functionality.
This patch adds a check to see whether or not the source should be patched,
avoiding patching the source when it shouldn't.
Also, we dont need to create multiple identical tarballs for all gcc packages,
this patch fixes this and creates a single source tarball for gcc.
When requesting patched sources, a race condition is created for linux-yocto tasks,
unpack_and_patch is executed along with kernel_configme, which most of the time
causes errors during configure, since kernel_configme task is specific to the kernel,
simply modifying the tasks order by creating a dependency to kernel_configme was impossible,
causing errors on all other packages that didnt use kernel_configme, this is fixed by
creating a special case for the kernel, adding tasks with correct dependencies,
avoiding the race condition and behaving the way it should for all other packages as well.
[YOCTO #8378]
(From OE-Core rev: aecaa0e8739db1c228a6db78225a717d9f348a5b)
Signed-off-by: Alejandro Hernandez <alejandro.hernandez@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
2015-10-06 23:05:09 +00:00
python () {
# Add tasks in the correct order, specifically for linux-yocto to avoid race condition
if bb.data.inherits_class('kernel-yocto', d):
bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}