meta: remove True option to getVar calls

getVar() now expands by default, so remove the redundant True
argument from getVar() calls with a regex search and replace.

Search made with the following regex: getVar ?\(( ?[^,()]*), True\)
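For illustration, a minimal sketch of that substitution using Python's re
module (the replacement pattern is an assumption; the commit message records
only the search regex). Note that the regex matches only single-argument
getVar calls with no space before the closing parenthesis, which is why
oddly spaced calls such as data.getVar('http_proxy', True ) survive in the
diff below, and why getVar(..., False) and getVarFlag(..., True) calls are
left alone.

    import re

    # Search regex from the commit message; group 1 captures the variable
    # name argument so the replacement can keep it.
    pattern = re.compile(r"getVar ?\(( ?[^,()]*), True\)")

    line = "    pn = d.getVar('PN', True)"
    print(pattern.sub(r"getVar(\1)", line))
    # prints:     pn = d.getVar('PN')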

(From OE-Core rev: 7c552996597faaee2fbee185b250c0ee30ea3b5f)

Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Authored by Joshua Lock on 2016-12-14 21:13:04 +00:00, committed by Richard Purdie
parent d5e67725ac
commit c4e2c59088
261 changed files with 2305 additions and 2305 deletions

View File

@@ -11,7 +11,7 @@ PACKAGE_ARCH = "all"
 python () {
     # Allow this class to be included but overridden - only set
     # the values if we're still "all" package arch.
     if d.getVar("PACKAGE_ARCH", True) == "all":
+    if d.getVar("PACKAGE_ARCH") == "all":
         # No need for virtual/libc or a cross compiler
         d.setVar("INHIBIT_DEFAULT_DEPS","1")
@@ -47,6 +47,6 @@ python () {
         d.setVarFlag("emit_pkgdata", "vardepsexclude", "MULTILIB_VARIANTS")
         d.setVarFlag("write_specfile", "vardepsexclude", "MULTILIBS")
     elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d):
-        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE", True))
+        bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE"))
 }

View File

@@ -52,10 +52,10 @@ do_deploy_all_archives[dirs] = "${WORKDIR}"

 python () {
-    pn = d.getVar('PN', True)
-    assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
+    pn = d.getVar('PN')
+    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
     if pn in assume_provided:
-        for p in d.getVar("PROVIDES", True).split():
+        for p in d.getVar("PROVIDES").split():
             if p != pn:
                 pn = p
                 break
@@ -68,7 +68,7 @@ python () {
         bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

     # We just archive gcc-source for all the gcc related recipes
-    if d.getVar('BPN', True) in ['gcc', 'libgcc'] \
+    if d.getVar('BPN') in ['gcc', 'libgcc'] \
             and not pn.startswith('gcc-source'):
         bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
         return
@@ -106,7 +106,7 @@ python () {
     # Output the srpm package
     ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
     if ar_srpm == "1":
-        if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm':
+        if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
             d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
             if ar_dumpdata == "1":
                 d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
@@ -130,9 +130,9 @@ python do_ar_original() {
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
         return

-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     bb.note('Archiving the original source...')
-    urls = d.getVar("SRC_URI", True).split()
+    urls = d.getVar("SRC_URI").split()
     # destsuffix (git fetcher) and subdir (everything else) are allowed to be
     # absolute paths (for example, destsuffix=${S}/foobar).
     # That messes with unpacking inside our tmpdir below, because the fetchers
@@ -157,7 +157,7 @@ python do_ar_original() {
         if os.path.isfile(local):
             shutil.copy(local, ar_outdir)
         elif os.path.isdir(local):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True))
+            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
             fetch.unpack(tmpdir, (url,))
             # To handle recipes with more than one source, we add the "name"
             # URL parameter as suffix. We treat it as an error when
@@ -195,24 +195,24 @@ python do_ar_patched() {
         return

     # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
     bb.note('Archiving the patched source...')
     d.setVar('WORKDIR', ar_workdir)
-    create_tarball(d, d.getVar('S', True), 'patched', ar_outdir)
+    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
 }

 python do_ar_configured() {
     import shutil

-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
     if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
         bb.note('Archiving the configured source...')
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         # "gcc-source-${PV}" recipes don't have "do_configure"
         # task, so we need to run "do_preconfigure" instead
         if pn.startswith("gcc-source-"):
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             bb.build.exec_func('do_preconfigure', d)

         # The libtool-native's do_configure will remove the
@@ -221,7 +221,7 @@ python do_ar_configured() {
         # instead of.
         elif pn != 'libtool-native':
             # Change the WORKDIR to make do_configure run in another dir.
-            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True))
+            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
             if bb.data.inherits_class('kernel-yocto', d):
                 bb.build.exec_func('do_kernel_configme', d)
             if bb.data.inherits_class('cmake', d):
@@ -235,12 +235,12 @@ python do_ar_configured() {
             for func in (postfuncs or '').split():
                 if func != "do_qa_configure":
                     bb.build.exec_func(func, d)
-        srcdir = d.getVar('S', True)
-        builddir = d.getVar('B', True)
+        srcdir = d.getVar('S')
+        builddir = d.getVar('B')
         if srcdir != builddir:
             if os.path.exists(builddir):
                 oe.path.copytree(builddir, os.path.join(srcdir, \
-                    'build.%s.ar_configured' % d.getVar('PF', True)))
+                    'build.%s.ar_configured' % d.getVar('PF')))

         create_tarball(d, srcdir, 'configured', ar_outdir)
 }
@@ -251,14 +251,14 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
     import tarfile

     # Make sure we are only creating a single tarball for gcc sources
-    if (d.getVar('SRC_URI', True) == ""):
+    if (d.getVar('SRC_URI') == ""):
         return

     bb.utils.mkdirhier(ar_outdir)
     if suffix:
-        filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix)
+        filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
     else:
-        filename = '%s.tar.gz' % d.getVar('PF', True)
+        filename = '%s.tar.gz' % d.getVar('PF')
     tarname = os.path.join(ar_outdir, filename)

     bb.note('Creating %s' % tarname)
@@ -286,7 +286,7 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
     dirname = os.path.dirname(src)
     basename = os.path.basename(src)
     os.chdir(dirname)
-    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True))
+    out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
     diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
     subprocess.call(diff_cmd, shell=True)
     bb.utils.remove(src_patched, recurse=True)
@@ -297,9 +297,9 @@ python do_unpack_and_patch() {
             [ 'patched', 'configured'] and \
             d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
         return
-    ar_outdir = d.getVar('ARCHIVER_OUTDIR', True)
-    ar_workdir = d.getVar('ARCHIVER_WORKDIR', True)
-    pn = d.getVar('PN', True)
+    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
+    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
+    pn = d.getVar('PN')

     # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
     if not (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source')):
@@ -309,18 +309,18 @@ python do_unpack_and_patch() {
         # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
         # possibly requiring of the following tasks (such as some recipes's
         # do_patch required 'B' existed).
-        bb.utils.mkdirhier(d.getVar('B', True))
+        bb.utils.mkdirhier(d.getVar('B'))
         bb.build.exec_func('do_unpack', d)

     # Save the original source for creating the patches
     if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
-        src = d.getVar('S', True).rstrip('/')
+        src = d.getVar('S').rstrip('/')
         src_orig = '%s.orig' % src
         oe.path.copytree(src, src_orig)

     # Make sure gcc and kernel sources are patched only once
-    if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
+    if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))):
         bb.build.exec_func('do_patch', d)

     # Create the patches
@@ -339,14 +339,14 @@ python do_ar_recipe () {
     require_re = re.compile( r"require\s+(.+)" )
     include_re = re.compile( r"include\s+(.+)" )
-    bbfile = d.getVar('FILE', True)
-    outdir = os.path.join(d.getVar('WORKDIR', True), \
-            '%s-recipe' % d.getVar('PF', True))
+    bbfile = d.getVar('FILE')
+    outdir = os.path.join(d.getVar('WORKDIR'), \
+            '%s-recipe' % d.getVar('PF'))
     bb.utils.mkdirhier(outdir)
     shutil.copy(bbfile, outdir)

-    pn = d.getVar('PN', True)
-    bbappend_files = d.getVar('BBINCLUDED', True).split()
+    pn = d.getVar('PN')
+    bbappend_files = d.getVar('BBINCLUDED').split()
     # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
     # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
     bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" %pn)
@@ -356,7 +356,7 @@ python do_ar_recipe () {
             shutil.copy(file, outdir)

     dirname = os.path.dirname(bbfile)
-    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True))
+    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
     f = open(bbfile, 'r')
     for line in f.readlines():
         incfile = None
@@ -370,7 +370,7 @@ python do_ar_recipe () {
         if incfile:
             shutil.copy(incfile, outdir)

-    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True))
+    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
     bb.utils.remove(outdir, recurse=True)
 }
@@ -379,8 +379,8 @@ python do_dumpdata () {
     dump environment data to ${PF}-showdata.dump
     """

-    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \
-        '%s-showdata.dump' % d.getVar('PF', True))
+    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
+        '%s-showdata.dump' % d.getVar('PF'))
     bb.note('Dumping metadata into %s' % dumpfile)
     with open(dumpfile, "w") as f:
         # emit variables and shell functions

View File

@@ -1,8 +1,8 @@
 def autotools_dep_prepend(d):
-    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
+    if d.getVar('INHIBIT_AUTOTOOLS_DEPS'):
         return ''

-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deps = ''

     if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
@@ -14,7 +14,7 @@ def autotools_dep_prepend(d):
     if not bb.data.inherits_class('native', d) \
                     and not bb.data.inherits_class('nativesdk', d) \
                     and not bb.data.inherits_class('cross', d) \
-                    and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
+                    and not d.getVar('INHIBIT_DEFAULT_DEPS'):
         deps += 'libtool-cross '

     return deps + 'gnu-config-native '
@@ -139,15 +139,15 @@ ACLOCALDIR = "${WORKDIR}/aclocal-copy"
 python autotools_copy_aclocals () {
     import copy

-    s = d.getVar("AUTOTOOLS_SCRIPT_PATH", True)
+    s = d.getVar("AUTOTOOLS_SCRIPT_PATH")
     if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"):
         if not d.getVar("AUTOTOOLS_COPYACLOCAL", False):
             return

     taskdepdata = d.getVar("BB_TASKDEPDATA", False)
     #bb.warn(str(taskdepdata))
-    pn = d.getVar("PN", True)
-    aclocaldir = d.getVar("ACLOCALDIR", True)
+    pn = d.getVar("PN")
+    aclocaldir = d.getVar("ACLOCALDIR")
     oe.path.remove(aclocaldir)
     bb.utils.mkdirhier(aclocaldir)
     start = None

View File

@@ -16,7 +16,7 @@ OE_IMPORTS[type] = "list"
 def oe_import(d):
     import sys

-    bbpath = d.getVar("BBPATH", True).split(":")
+    bbpath = d.getVar("BBPATH").split(":")
     sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]

     def inject(name, value):
@@ -37,7 +37,7 @@ def oe_import(d):
 OE_IMPORTED := "${@oe_import(d)}"

 def lsb_distro_identifier(d):
-    adjust = d.getVar('LSB_DISTRO_ADJUST', True)
+    adjust = d.getVar('LSB_DISTRO_ADJUST')
     adjust_func = None
     if adjust:
         try:
@@ -72,7 +72,7 @@ def base_dep_prepend(d):
     # we need that built is the responsibility of the patch function / class, not
     # the application.
     if not d.getVar('INHIBIT_DEFAULT_DEPS', False):
-        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
+        if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')):
             deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
     return deps
@@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} "
 FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}"
 # THISDIR only works properly with imediate expansion as it has to run
 # in the context of the location its used (:=)
-THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}"
+THISDIR = "${@os.path.dirname(d.getVar('FILE'))}"

 def extra_path_elements(d):
     path = ""
-    elements = (d.getVar('EXTRANATIVEPATH', True) or "").split()
+    elements = (d.getVar('EXTRANATIVEPATH') or "").split()
     for e in elements:
         path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":"
     return path
@@ -96,11 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}"

 def get_lic_checksum_file_list(d):
     filelist = []
-    lic_files = d.getVar("LIC_FILES_CHKSUM", True) or ''
-    tmpdir = d.getVar("TMPDIR", True)
-    s = d.getVar("S", True)
-    b = d.getVar("B", True)
-    workdir = d.getVar("WORKDIR", True)
+    lic_files = d.getVar("LIC_FILES_CHKSUM") or ''
+    tmpdir = d.getVar("TMPDIR")
+    s = d.getVar("S")
+    b = d.getVar("B")
+    workdir = d.getVar("WORKDIR")

     urls = lic_files.split()
     for url in urls:
@@ -116,7 +116,7 @@ def get_lic_checksum_file_list(d):
                 continue
             filelist.append(path + ":" + str(os.path.exists(path)))
         except bb.fetch.MalformedUrl:
-            bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
+            bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url)
     return " ".join(filelist)

 addtask fetch
@@ -126,7 +126,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
 do_fetch[vardeps] += "SRCREV"
 python base_do_fetch() {

-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return
@@ -141,31 +141,31 @@ addtask unpack after do_fetch
 do_unpack[dirs] = "${WORKDIR}"

 python () {
-    if d.getVar('S', True) != d.getVar('WORKDIR', True):
+    if d.getVar('S') != d.getVar('WORKDIR'):
         d.setVarFlag('do_unpack', 'cleandirs', '${S}')
     else:
         d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches'))
 }
 python base_do_unpack() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return

     try:
         fetcher = bb.fetch2.Fetch(src_uri, d)
-        fetcher.unpack(d.getVar('WORKDIR', True))
+        fetcher.unpack(d.getVar('WORKDIR'))
     except bb.fetch2.BBFetchException as e:
         bb.fatal(str(e))
 }

 def pkgarch_mapping(d):
     # Compatibility mappings of TUNE_PKGARCH (opt in)
-    if d.getVar("PKGARCHCOMPAT_ARMV7A", True):
-        if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon":
+    if d.getVar("PKGARCHCOMPAT_ARMV7A"):
+        if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon":
             d.setVar("TUNE_PKGARCH", "armv7a")

 def get_layers_branch_rev(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -192,7 +192,7 @@ BUILDCFG_FUNCS[type] = "list"
 def buildcfg_vars(d):
     statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
     for var in statusvars:
-        value = d.getVar(var, True)
+        value = d.getVar(var)
         if value is not None:
             yield '%-17s = "%s"' % (var, value)
@@ -200,7 +200,7 @@ def buildcfg_neededvars(d):
     needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
     pesteruser = []
     for v in needed_vars:
-        val = d.getVar(v, True)
+        val = d.getVar(v)
         if not val or val == 'INVALID':
             pesteruser.append(v)
@@ -233,7 +233,7 @@ python base_eventhandler() {
             if flines:
                 statuslines.extend(flines)

-        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
+        statusheader = e.data.getVar('BUILDCFG_HEADER')
         if statusheader:
             bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
@@ -241,7 +241,7 @@ python base_eventhandler() {
     # target ones and we'd see dulpicate key names overwriting each other
     # for various PREFERRED_PROVIDERS
     if isinstance(e, bb.event.RecipePreFinalise):
-        if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True):
+        if e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"):
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial")
             e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc")
@@ -267,14 +267,14 @@ python base_eventhandler() {
         # sysroot since they're now "unreachable". This makes switching virtual/kernel work in
         # particular.
         #
-        pn = d.getVar('PN', True)
+        pn = d.getVar('PN')
         source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
         if not source_mirror_fetch:
-            provs = (d.getVar("PROVIDES", True) or "").split()
-            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split()
+            provs = (d.getVar("PROVIDES") or "").split()
+            multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
            for p in provs:
                 if p.startswith("virtual/") and p not in multiwhitelist:
-                    profprov = d.getVar("PREFERRED_PROVIDER_" + p, True)
+                    profprov = d.getVar("PREFERRED_PROVIDER_" + p)
                     if profprov and pn != profprov:
                         raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn))
 }
@@ -336,9 +336,9 @@ def set_packagetriplet(d):
     tos = []
     tvs = []

-    archs.append(d.getVar("PACKAGE_ARCHS", True).split())
-    tos.append(d.getVar("TARGET_OS", True))
-    tvs.append(d.getVar("TARGET_VENDOR", True))
+    archs.append(d.getVar("PACKAGE_ARCHS").split())
+    tos.append(d.getVar("TARGET_OS"))
+    tvs.append(d.getVar("TARGET_VENDOR"))

     def settriplet(d, varname, archs, tos, tvs):
         triplets = []
@@ -350,16 +350,16 @@ def set_packagetriplet(d):
     settriplet(d, "PKGTRIPLETS", archs, tos, tvs)

-    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+    variants = d.getVar("MULTILIB_VARIANTS") or ""
     for item in variants.split():
         localdata = bb.data.createCopy(d)
         overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
         localdata.setVar("OVERRIDES", overrides)
         bb.data.update_data(localdata)

-        archs.append(localdata.getVar("PACKAGE_ARCHS", True).split())
-        tos.append(localdata.getVar("TARGET_OS", True))
-        tvs.append(localdata.getVar("TARGET_VENDOR", True))
+        archs.append(localdata.getVar("PACKAGE_ARCHS").split())
+        tos.append(localdata.getVar("TARGET_OS"))
+        tvs.append(localdata.getVar("TARGET_VENDOR"))

     settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs)
@@ -374,10 +374,10 @@ python () {
     # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
     pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
     if pkgconfigflags:
-        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
-        pn = d.getVar("PN", True)
+        pkgconfig = (d.getVar('PACKAGECONFIG') or "").split()
+        pn = d.getVar("PN")

-        mlprefix = d.getVar("MLPREFIX", True)
+        mlprefix = d.getVar("MLPREFIX")

         def expandFilter(appends, extension, prefix):
             appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
@@ -419,7 +419,7 @@ python () {
                 num = len(items)
                 if num > 4:
                     bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
-                        % (d.getVar('PN', True), flag))
+                        % (d.getVar('PN'), flag))

                 if flag in pkgconfig:
                     if num >= 3 and items[2]:
@@ -434,8 +434,8 @@ python () {
         appendVar('RDEPENDS_${PN}', extrardeps)
         appendVar('PACKAGECONFIG_CONFARGS', extraconf)

-    pn = d.getVar('PN', True)
-    license = d.getVar('LICENSE', True)
+    pn = d.getVar('PN')
+    license = d.getVar('LICENSE')
     if license == "INVALID":
         bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
@@ -465,26 +465,26 @@ python () {
         d.setVarFlag('do_devshell', 'fakeroot', '1')
         d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')

-    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
+    need_machine = d.getVar('COMPATIBLE_MACHINE')
     if need_machine:
         import re
-        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
+        compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":")
         for m in compat_machines:
             if re.match(need_machine, m):
                 break
         else:
-            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))
+            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE'))

     source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False)
     if not source_mirror_fetch:
-        need_host = d.getVar('COMPATIBLE_HOST', True)
+        need_host = d.getVar('COMPATIBLE_HOST')
         if need_host:
             import re
-            this_host = d.getVar('HOST_SYS', True)
+            this_host = d.getVar('HOST_SYS')
             if not re.match(need_host, this_host):
                 raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)

-    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+    bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()

     check_license = False if pn.startswith("nativesdk-") else True
     for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}",
@@ -503,21 +503,21 @@ python () {
         for lic in bad_licenses:
             spdx_license = return_spdx(d, lic)
             for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]:
-                whitelist.extend((d.getVar(w + lic, True) or "").split())
+                whitelist.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
+                    whitelist.extend((d.getVar(w + spdx_license) or "").split())
                 '''
                 We need to track what we are whitelisting and why. If pn is
                 incompatible we need to be able to note that the image that
                 is created may infact contain incompatible licenses despite
                 INCOMPATIBLE_LICENSE being set.
                 '''
-                incompatwl.extend((d.getVar(w + lic, True) or "").split())
+                incompatwl.extend((d.getVar(w + lic) or "").split())
                 if spdx_license:
-                    incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
+                    incompatwl.extend((d.getVar(w + spdx_license) or "").split())

         if not pn in whitelist:
-            pkgs = d.getVar('PACKAGES', True).split()
+            pkgs = d.getVar('PACKAGES').split()
             skipped_pkgs = []
             unskipped_pkgs = []
             for pkg in pkgs:
@@ -529,7 +529,7 @@ python () {
             if unskipped_pkgs:
                 for pkg in skipped_pkgs:
                     bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license)
-                    mlprefix = d.getVar('MLPREFIX', True)
+                    mlprefix = d.getVar('MLPREFIX')
                     d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
                 for pkg in unskipped_pkgs:
                     bb.debug(1, "INCLUDING the package " + pkg)
@@ -545,8 +545,8 @@ python () {
        # matching of license expressions - just check that all license strings
        # in LICENSE_<pkg> are found in LICENSE.
        license_set = oe.license.list_licenses(license)
-       for pkg in d.getVar('PACKAGES', True).split():
-           pkg_license = d.getVar('LICENSE_' + pkg, True)
+       for pkg in d.getVar('PACKAGES').split():
+           pkg_license = d.getVar('LICENSE_' + pkg)
            if pkg_license:
                unlisted = oe.license.list_licenses(pkg_license) - license_set
                if unlisted:
@@ -554,7 +554,7 @@ python () {
                            "listed in LICENSE" % (pkg, ' '.join(unlisted)))

     needsrcrev = False
-    srcuri = d.getVar('SRC_URI', True)
+    srcuri = d.getVar('SRC_URI')
     for uri in srcuri.split():
         (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
@@ -614,8 +614,8 @@ python () {
     set_packagetriplet(d)

     # 'multimachine' handling
-    mach_arch = d.getVar('MACHINE_ARCH', True)
-    pkg_arch = d.getVar('PACKAGE_ARCH', True)
+    mach_arch = d.getVar('MACHINE_ARCH')
+    pkg_arch = d.getVar('PACKAGE_ARCH')

     if (pkg_arch == mach_arch):
         # Already machine specific - nothing further to do
@@ -625,11 +625,11 @@ python () {
     # We always try to scan SRC_URI for urls with machine overrides
     # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
     #
-    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
+    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH')
     if override != '0':
         paths = []
-        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
-        machine = d.getVar('MACHINE', True)
+        fpaths = (d.getVar('FILESPATH') or '').split(':')
+        machine = d.getVar('MACHINE')
         for p in fpaths:
             if os.path.basename(p) == machine and os.path.isdir(p):
                 paths.append(p)
@@ -646,16 +646,16 @@ python () {
                 d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
             return

-    packages = d.getVar('PACKAGES', True).split()
+    packages = d.getVar('PACKAGES').split()
     for pkg in packages:
-        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
+        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg)

         # We could look for != PACKAGE_ARCH here but how to choose
         # if multiple differences are present?
         # Look through PACKAGE_ARCHS for the priority order?
         if pkgarch and pkgarch == mach_arch:
             d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}")
-            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN", True))
+            bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN"))
 }

 addtask cleansstate after do_clean
@@ -666,7 +666,7 @@ addtask cleanall after do_cleansstate
 do_cleansstate[nostamp] = "1"

 python do_cleanall() {
-    src_uri = (d.getVar('SRC_URI', True) or "").split()
+    src_uri = (d.getVar('SRC_URI') or "").split()
     if len(src_uri) == 0:
         return

View File

@@ -22,7 +22,7 @@ def get_binconfig_mangle(d):
         s += " -e 's:-I${WORKDIR}:-I${STAGING_INCDIR}:'"
         s += " -e 's:-L${WORKDIR}:-L${STAGING_LIBDIR}:'"
         if d.getVar("OE_BINCONFIG_EXTRA_MANGLE", False):
-            s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE", True)
+            s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE")

     return s

View File

@@ -16,7 +16,7 @@
 addhandler blacklist_multilib_eventhandler
 blacklist_multilib_eventhandler[eventmask] = "bb.event.ConfigParsed"
 python blacklist_multilib_eventhandler() {
-    multilibs = e.data.getVar('MULTILIBS', True)
+    multilibs = e.data.getVar('MULTILIBS')
     if not multilibs:
         return
@@ -38,7 +38,7 @@ python blacklist_multilib_eventhandler() {
 }

 python () {
-    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN', True), True)
+    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN'), True)

     if blacklist:
         raise bb.parse.SkipPackage("Recipe is blacklisted: %s" % (blacklist))

View File

@@ -110,12 +110,12 @@ python bugzilla_eventhandler() {
         return

     if name == "TaskFailed":
-        xmlrpc = data.getVar("BUGZILLA_XMLRPC", True)
-        user = data.getVar("BUGZILLA_USER", True)
-        passw = data.getVar("BUGZILLA_PASS", True)
-        product = data.getVar("BUGZILLA_PRODUCT", True)
-        compon = data.getVar("BUGZILLA_COMPONENT", True)
-        version = data.getVar("BUGZILLA_VERSION", True)
+        xmlrpc = data.getVar("BUGZILLA_XMLRPC")
+        user = data.getVar("BUGZILLA_USER")
+        passw = data.getVar("BUGZILLA_PASS")
+        product = data.getVar("BUGZILLA_PRODUCT")
+        compon = data.getVar("BUGZILLA_COMPONENT")
+        version = data.getVar("BUGZILLA_VERSION")

         proxy = data.getVar('http_proxy', True )
         if (proxy):
@@ -133,14 +133,14 @@ python bugzilla_eventhandler() {
                 'component': compon}

         # evil hack to figure out what is going on
-        debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a")
+        debug_file = open(os.path.join(data.getVar("TMPDIR"),"..","bugzilla-log"),"a")

         file = None
-        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True),
-                                                     "pv"      : data.getVar("PV", True),
+        bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN"),
+                                                     "pv"      : data.getVar("PV"),
                                                      }
-        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task))
-        text     = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) )
+        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task))
+        text     = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar( 'MACHINE', True ) )
         if len(log_file) != 0:
             print >> debug_file, "Adding log file %s" % log_file[0]
             file = open(log_file[0], 'r')
@@ -168,7 +168,7 @@ python bugzilla_eventhandler() {
             if bug_number and log:
                 print >> debug_file, "The bug is known as '%s'" % bug_number

-                desc = "Build log for machine %s" % (data.getVar('MACHINE', True))
+                desc = "Build log for machine %s" % (data.getVar('MACHINE'))
                 if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc):
                     print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number
                 else:

View File

@@ -64,18 +64,18 @@ PATCH_GIT_USER_NAME ?= "OpenEmbedded"
 # Write out metadata about this package for comparison when writing future packages
 #
 python buildhistory_emit_pkghistory() {
-    if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']:
+    if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']:
         return 0

-    if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split():
+    if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split():
         return 0

     import re
     import json
     import errno

-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
-    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
+    oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE')

     class RecipeInfo:
         def __init__(self, name):
@@ -182,12 +182,12 @@ python buildhistory_emit_pkghistory() {
             items.sort()
             return ' '.join(items)

-    pn = d.getVar('PN', True)
-    pe = d.getVar('PE', True) or "0"
-    pv = d.getVar('PV', True)
-    pr = d.getVar('PR', True)
+    pn = d.getVar('PN')
+    pe = d.getVar('PE') or "0"
+    pv = d.getVar('PV')
+    pr = d.getVar('PR')

-    pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+    pkgdata_dir = d.getVar('PKGDATA_DIR')
     packages = ""
     try:
         with open(os.path.join(pkgdata_dir, pn)) as f:
@@ -203,7 +203,7 @@ python buildhistory_emit_pkghistory() {
             raise

     packagelist = packages.split()
-    preserve = d.getVar('BUILDHISTORY_PRESERVE', True).split()
+    preserve = d.getVar('BUILDHISTORY_PRESERVE').split()
     if not os.path.exists(pkghistdir):
         bb.utils.mkdirhier(pkghistdir)
     else:
@@ -223,11 +223,11 @@ python buildhistory_emit_pkghistory() {
     rcpinfo.pe = pe
     rcpinfo.pv = pv
     rcpinfo.pr = pr
-    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or ""))
+    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or ""))
     rcpinfo.packages = packages
     write_recipehistory(rcpinfo, d)

-    pkgdest = d.getVar('PKGDEST', True)
+    pkgdest = d.getVar('PKGDEST')
     for pkg in packagelist:
         pkgdata = {}
         with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
@@ -293,7 +293,7 @@ python buildhistory_emit_pkghistory() {
 def write_recipehistory(rcpinfo, d):
     bb.debug(2, "Writing recipe history")

-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')

     infofile = os.path.join(pkghistdir, "latest")
     with open(infofile, "w") as f:
@@ -308,7 +308,7 @@ def write_recipehistory(rcpinfo, d):
 def write_pkghistory(pkginfo, d):
     bb.debug(2, "Writing package history for package %s" % pkginfo.name)

-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')

     pkgpath = os.path.join(pkghistdir, pkginfo.name)
     if not os.path.exists(pkgpath):
@@ -369,7 +369,7 @@ def buildhistory_list_installed(d, rootfs_type="image"):
         pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")

     for output_type, output_file in process_list:
-        output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file)
+        output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)

         with open(output_file_full, 'w') as output:
             output.write(format_pkg_list(pkgs, output_type))
@@ -550,7 +550,7 @@ END
 python buildhistory_get_extra_sdkinfo() {
     import operator
     import math
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         tasksizes = {}
         filesizes = {}
         for root, _, files in os.walk(d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')):
@@ -591,7 +591,7 @@ SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
 SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "

 def buildhistory_get_build_id(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
@@ -605,12 +605,12 @@ def buildhistory_get_build_id(d):
         if flines:
             statuslines.extend(flines)

-    statusheader = d.getVar('BUILDCFG_HEADER', True)
+    statusheader = d.getVar('BUILDCFG_HEADER')
     return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

 def buildhistory_get_metadata_revs(d):
     # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None)) \
@@ -622,7 +622,7 @@ def outputvars(vars, listvars, d):
     listvars = listvars.split()
     ret = ""
     for var in vars:
-        value = d.getVar(var, True) or ""
+        value = d.getVar(var) or ""
         if var in listvars:
             # Squash out spaces
             value = oe.utils.squashspaces(value)
@@ -630,17 +630,17 @@ def outputvars(vars, listvars, d):
     return ret.rstrip('\n')

 def buildhistory_get_imagevars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
     listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
     return outputvars(imagevars, listvars, d)

 def buildhistory_get_sdkvars(d):
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
-    if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext':
+    if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
         # Extensible SDK uses some additional variables
         sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN"
         listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST"
@@ -735,16 +735,16 @@ END
 }

 python buildhistory_eventhandler() {
-    if e.data.getVar('BUILDHISTORY_FEATURES', True).strip():
-        reset = e.data.getVar("BUILDHISTORY_RESET", True)
-        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR", True)
+    if e.data.getVar('BUILDHISTORY_FEATURES').strip():
+        reset = e.data.getVar("BUILDHISTORY_RESET")
+        olddir = e.data.getVar("BUILDHISTORY_OLD_DIR")
         if isinstance(e, bb.event.BuildStarted):
             if reset:
                 import shutil
                 # Clean up after potentially interrupted build.
                 if os.path.isdir(olddir):
                     shutil.rmtree(olddir)
-                rootdir = e.data.getVar("BUILDHISTORY_DIR", True)
+                rootdir = e.data.getVar("BUILDHISTORY_DIR")
                 entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ]
                 bb.utils.mkdirhier(olddir)
                 for entry in entries:
@@ -754,7 +754,7 @@ python buildhistory_eventhandler() {
             if reset:
                 import shutil
                 shutil.rmtree(olddir)
-        if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1":
+        if e.data.getVar("BUILDHISTORY_COMMIT") == "1":
             bb.note("Writing buildhistory")
             localdata = bb.data.createCopy(e.data)
             localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures))
@@ -774,7 +774,7 @@ def _get_srcrev_values(d):
     """

     scms = []
-    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d)
+    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -806,7 +806,7 @@ def _get_srcrev_values(d):
 do_fetch[postfuncs] += "write_srcrev"
 do_fetch[vardepsexclude] += "write_srcrev"
 python write_srcrev() {
-    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
+    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE')
     srcrevfile = os.path.join(pkghistdir, 'latest_srcrev')

     srcrevs, tag_srcrevs = _get_srcrev_values(d)
@@ -838,7 +838,7 @@ python write_srcrev() {
                 for name, srcrev in tag_srcrevs.items():
                     f.write('# tag_%s = "%s"\n' % (name, srcrev))
                     if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
-                        pkg = d.getVar('PN', True)
+                        pkg = d.getVar('PN')
                         bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))

             else:

View File

@@ -7,7 +7,7 @@ python buildstats_summary () {
     if not os.path.exists(bsdir):
         return

-    sstatetasks = (e.data.getVar('SSTATETASKS', True) or '').split()
+    sstatetasks = (e.data.getVar('SSTATETASKS') or '').split()
     built = collections.defaultdict(lambda: [set(), set()])
     for pf in os.listdir(bsdir):
         taskdir = os.path.join(bsdir, pf)

View File

@@ -75,8 +75,8 @@ def get_buildtimedata(var, d):
     return timediff, cpuperc

 def write_task_data(status, logfile, e, d):
-    bn = d.getVar('BUILDNAME', True)
-    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn)
+    bn = d.getVar('BUILDNAME')
+    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
     with open(os.path.join(logfile), "a") as f:
         elapsedtime = get_timedata("__timedata_task", d, e.time)
         if elapsedtime:
@@ -106,9 +106,9 @@ python run_buildstats () {
     import bb.event
     import time, subprocess, platform

-    bn = d.getVar('BUILDNAME', True)
-    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn)
-    taskdir = os.path.join(bsdir, d.getVar('PF', True))
+    bn = d.getVar('BUILDNAME')
+    bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn)
+    taskdir = os.path.join(bsdir, d.getVar('PF'))

     if isinstance(e, bb.event.BuildStarted):
         ########################################################################
@@ -162,7 +162,7 @@ python run_buildstats () {
         if e.task == "do_rootfs":
             bs = os.path.join(bsdir, "build_stats")
             with open(bs, "a") as f:
-                rootfs = d.getVar('IMAGE_ROOTFS', True)
+                rootfs = d.getVar('IMAGE_ROOTFS')
                 if os.path.isdir(rootfs):
                     try:
                         rootfs_size = subprocess.check_output(["du", "-sh", rootfs],
@@ -197,7 +197,7 @@ python runqueue_stats () {
     # are available that we need to find the output directory.
    # The persistent SystemStats is stored in the datastore and
    # closed when the build is done.
-    system_stats = d.getVar('_buildstats_system_stats', True)
+    system_stats = d.getVar('_buildstats_system_stats')
     if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)):
         system_stats = buildstats.SystemStats(d)
         d.setVar('_buildstats_system_stats', system_stats)

View File

@@ -1,4 +1,4 @@
-CCACHE = "${@bb.utils.which(d.getVar('PATH', True), 'ccache') and 'ccache '}"
+CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}"
 export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}"
 CCACHE_DISABLE[unexport] = "1"

View File

@ -44,7 +44,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d):
p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE) p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE)
out, err = p.communicate() out, err = p.communicate()
if p.returncode != 0: if p.returncode != 0:
bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN', True), p.returncode, out, err)) bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err))
def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d): def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d):
import subprocess as sub import subprocess as sub
@ -72,7 +72,7 @@ def process_dir (rootdir, directory, d):
cmd = d.expand('${CHRPATH_BIN}') cmd = d.expand('${CHRPATH_BIN}')
tmpdir = os.path.normpath(d.getVar('TMPDIR', False)) tmpdir = os.path.normpath(d.getVar('TMPDIR', False))
baseprefix = os.path.normpath(d.expand('${base_prefix}')) baseprefix = os.path.normpath(d.expand('${base_prefix}'))
hostos = d.getVar("HOST_OS", True) hostos = d.getVar("HOST_OS")
#bb.debug("Checking %s for binaries to process" % directory) #bb.debug("Checking %s for binaries to process" % directory)
if not os.path.exists(directory): if not os.path.exists(directory):
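
The failure handling in process_file_linux() above is a pattern worth noting: capture both output streams and include them in the fatal message so chrpath failures are diagnosable from the log. A standalone sketch of the same pattern (argument names are illustrative):

import subprocess

def run_chrpath(cmd, new_rpath, fpath):
    # run the tool, capture output, and surface it on failure
    p = subprocess.Popen([cmd, "-r", new_rpath, fpath],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    if p.returncode != 0:
        raise RuntimeError("chrpath command failed with exit code %d:\n%s%s"
                           % (p.returncode, out.decode(), err.decode()))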

@ -46,7 +46,7 @@ cmake_do_generate_toolchain_file() {
# CMake system name must be something like "Linux". # CMake system name must be something like "Linux".
# This is important for cross-compiling. # This is important for cross-compiling.
set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).*/\1/'` ) set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).*/\1/'` )
set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH', True))} ) set( CMAKE_SYSTEM_PROCESSOR ${@map_target_arch_to_uname_arch(d.getVar('TARGET_ARCH'))} )
set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} ) set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} )
set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} ) set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} )
set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} ) set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} )
@ -112,15 +112,15 @@ cmake_do_configure() {
${OECMAKE_SITEFILE} \ ${OECMAKE_SITEFILE} \
${OECMAKE_SOURCEPATH} \ ${OECMAKE_SOURCEPATH} \
-DCMAKE_INSTALL_PREFIX:PATH=${prefix} \ -DCMAKE_INSTALL_PREFIX:PATH=${prefix} \
-DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir', True), d.getVar('prefix', True))} \ -DCMAKE_INSTALL_BINDIR:PATH=${@os.path.relpath(d.getVar('bindir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir', True), d.getVar('prefix', True))} \ -DCMAKE_INSTALL_SBINDIR:PATH=${@os.path.relpath(d.getVar('sbindir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir', True), d.getVar('prefix', True))} \ -DCMAKE_INSTALL_LIBEXECDIR:PATH=${@os.path.relpath(d.getVar('libexecdir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \ -DCMAKE_INSTALL_SYSCONFDIR:PATH=${sysconfdir} \
-DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir', True), d.getVar('prefix', True))} \ -DCMAKE_INSTALL_SHAREDSTATEDIR:PATH=${@os.path.relpath(d.getVar('sharedstatedir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \ -DCMAKE_INSTALL_LOCALSTATEDIR:PATH=${localstatedir} \
-DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir', True), d.getVar('prefix', True))} \ -DCMAKE_INSTALL_LIBDIR:PATH=${@os.path.relpath(d.getVar('libdir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir', True), d.getVar('prefix', True))} \ -DCMAKE_INSTALL_INCLUDEDIR:PATH=${@os.path.relpath(d.getVar('includedir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir', True), d.getVar('prefix', True))} \ -DCMAKE_INSTALL_DATAROOTDIR:PATH=${@os.path.relpath(d.getVar('datadir'), d.getVar('prefix'))} \
-DCMAKE_INSTALL_SO_NO_EXE=0 \ -DCMAKE_INSTALL_SO_NO_EXE=0 \
-DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \ -DCMAKE_TOOLCHAIN_FILE=${WORKDIR}/toolchain.cmake \
-DCMAKE_VERBOSE_MAKEFILE=1 \ -DCMAKE_VERBOSE_MAKEFILE=1 \
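
The os.path.relpath() calls above convert absolute install directories into prefix-relative ones because CMake, following the GNUInstallDirs convention, treats non-absolute CMAKE_INSTALL_* values as relative to CMAKE_INSTALL_PREFIX. Traced with typical values:

import os.path

prefix = "/usr"
print(os.path.relpath("/usr/bin", prefix))      # -> bin
print(os.path.relpath("/usr/lib", prefix))      # -> lib
print(os.path.relpath("/usr/libexec", prefix))  # -> libexec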

@ -26,7 +26,7 @@ python do_menuconfig() {
except OSError: except OSError:
mtime = 0 mtime = 0
oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND', True), oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'),
d.getVar('PN', True ) + ' Configuration', d) d.getVar('PN') + ' Configuration', d)
# FIXME this check can be removed when the minimum bitbake version has been bumped # FIXME this check can be removed when the minimum bitbake version has been bumped
@ -49,7 +49,7 @@ python do_diffconfig() {
import shutil import shutil
import subprocess import subprocess
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
fragment = workdir + '/fragment.cfg' fragment = workdir + '/fragment.cfg'
configorig = '.config.orig' configorig = '.config.orig'
config = '.config' config = '.config'
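
do_diffconfig() compares the saved .config.orig against the edited .config to produce fragment.cfg; the exact diff invocation is elided from this hunk. A rough standalone sketch that keeps only lines added or changed in the new config (not the class's actual diff command):

import difflib

with open(".config.orig") as f:
    orig = f.readlines()
with open(".config") as f:
    new = f.readlines()
fragment = [line[2:] for line in difflib.ndiff(orig, new)
            if line.startswith("+ ")]
with open("fragment.cfg", "w") as f:
    f.writelines(fragment)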

@ -31,25 +31,25 @@ DOC_DECOMPRESS_CMD[xz] ?= "unxz -v"
PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives" PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives"
python package_do_compress_doc() { python package_do_compress_doc() {
compress_mode = d.getVar('DOC_COMPRESS', True) compress_mode = d.getVar('DOC_COMPRESS')
compress_list = (d.getVar('DOC_COMPRESS_LIST', True) or '').split() compress_list = (d.getVar('DOC_COMPRESS_LIST') or '').split()
if compress_mode not in compress_list: if compress_mode not in compress_list:
bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list)) bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list))
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
compress_cmds = {} compress_cmds = {}
decompress_cmds = {} decompress_cmds = {}
for mode in compress_list: for mode in compress_list:
compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True) compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True)
decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True) decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True)
mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir", True)) mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir"))
if os.path.exists(mandir): if os.path.exists(mandir):
# Decompress doc files whose format is not compress_mode # Decompress doc files whose format is not compress_mode
decompress_doc(mandir, compress_mode, decompress_cmds) decompress_doc(mandir, compress_mode, decompress_cmds)
compress_doc(mandir, compress_mode, compress_cmds) compress_doc(mandir, compress_mode, compress_cmds)
infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir", True)) infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir"))
if os.path.exists(infodir): if os.path.exists(infodir):
# Decompress doc files whose format is not compress_mode # Decompress doc files whose format is not compress_mode
decompress_doc(infodir, compress_mode, decompress_cmds) decompress_doc(infodir, compress_mode, decompress_cmds)
@ -218,18 +218,18 @@ python compress_doc_updatealternatives () {
if not bb.data.inherits_class('update-alternatives', d): if not bb.data.inherits_class('update-alternatives', d):
return return
mandir = d.getVar("mandir", True) mandir = d.getVar("mandir")
infodir = d.getVar("infodir", True) infodir = d.getVar("infodir")
compress_mode = d.getVar('DOC_COMPRESS', True) compress_mode = d.getVar('DOC_COMPRESS')
for pkg in (d.getVar('PACKAGES', True) or "").split(): for pkg in (d.getVar('PACKAGES') or "").split():
old_names = (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split() old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split()
new_names = [] new_names = []
for old_name in old_names: for old_name in old_names:
old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True) old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True)
old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \ old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \
d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \ d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \
d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or \ d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \
d.getVar('ALTERNATIVE_TARGET', True) or \ d.getVar('ALTERNATIVE_TARGET') or \
old_link old_link
# Sometimes old_target is specified as relative to the link name. # Sometimes old_target is specified as relative to the link name.
old_target = os.path.join(os.path.dirname(old_link), old_target) old_target = os.path.join(os.path.dirname(old_link), old_target)
@ -247,7 +247,7 @@ python compress_doc_updatealternatives () {
elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True): elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True):
d.delVarFlag('ALTERNATIVE_TARGET', old_name) d.delVarFlag('ALTERNATIVE_TARGET', old_name)
d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target) d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target)
elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True): elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg):
d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target) d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target)
elif d.getVar('ALTERNATIVE_TARGET', old_name, True): elif d.getVar('ALTERNATIVE_TARGET', old_name, True):
d.setVar('ALTERNATIVE_TARGET', new_target) d.setVar('ALTERNATIVE_TARGET', new_target)
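
The renaming above is needed because compressing a man or info page changes its filename, so any update-alternatives link name and target must gain the same suffix. A simplified sketch of the rename step (paths are illustrative, not taken from the class):

import os

compress_mode = "gz"
mandir = "/usr/share/man"
old_link = "/usr/share/man/man1/vi.1"
old_target = "vi.1.vim"
# targets are sometimes specified relative to the link name
old_target = os.path.join(os.path.dirname(old_link), old_target)
if old_link.startswith(mandir):
    new_link = old_link + "." + compress_mode
    new_target = old_target + "." + compress_mode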

@ -13,7 +13,7 @@ python do_prepare_copyleft_sources () {
import os.path import os.path
import shutil import shutil
p = d.getVar('P', True) p = d.getVar('P')
included, reason = copyleft_should_include(d) included, reason = copyleft_should_include(d)
if not included: if not included:
bb.debug(1, 'copyleft: %s is excluded: %s' % (p, reason)) bb.debug(1, 'copyleft: %s is excluded: %s' % (p, reason))
@ -21,13 +21,13 @@ python do_prepare_copyleft_sources () {
else: else:
bb.debug(1, 'copyleft: %s is included: %s' % (p, reason)) bb.debug(1, 'copyleft: %s is included: %s' % (p, reason))
sources_dir = d.getVar('COPYLEFT_SOURCES_DIR', True) sources_dir = d.getVar('COPYLEFT_SOURCES_DIR')
dl_dir = d.getVar('DL_DIR', True) dl_dir = d.getVar('DL_DIR')
src_uri = d.getVar('SRC_URI', True).split() src_uri = d.getVar('SRC_URI').split()
fetch = bb.fetch2.Fetch(src_uri, d) fetch = bb.fetch2.Fetch(src_uri, d)
ud = fetch.ud ud = fetch.ud
pf = d.getVar('PF', True) pf = d.getVar('PF')
dest = os.path.join(sources_dir, pf) dest = os.path.join(sources_dir, pf)
shutil.rmtree(dest, ignore_errors=True) shutil.rmtree(dest, ignore_errors=True)
bb.utils.mkdirhier(dest) bb.utils.mkdirhier(dest)
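
The rmtree/mkdirhier pair above is the usual idiom for preparing a clean destination directory; bb.utils.mkdirhier() is roughly os.makedirs(..., exist_ok=True). In plain Python, with illustrative paths:

import os
import shutil

sources_dir = "/tmp/copyleft-sources"
pf = "example-1.0-r0"
dest = os.path.join(sources_dir, pf)
shutil.rmtree(dest, ignore_errors=True)  # drop any stale copy
os.makedirs(dest, exist_ok=True)         # recreate the tree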

@ -49,7 +49,7 @@ def copyleft_should_include(d):
included, motive = False, 'recipe did not match anything' included, motive = False, 'recipe did not match anything'
recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE', True) recipe_type = d.getVar('COPYLEFT_RECIPE_TYPE')
if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d): if recipe_type not in oe.data.typed_value('COPYLEFT_RECIPE_TYPES', d):
included, motive = False, 'recipe type "%s" is excluded' % recipe_type included, motive = False, 'recipe type "%s" is excluded' % recipe_type
@ -57,9 +57,9 @@ def copyleft_should_include(d):
exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d) exclude = oe.data.typed_value('COPYLEFT_LICENSE_EXCLUDE', d)
try: try:
is_included, reason = oe.license.is_included(d.getVar('LICENSE', True), include, exclude) is_included, reason = oe.license.is_included(d.getVar('LICENSE'), include, exclude)
except oe.license.LicenseError as exc: except oe.license.LicenseError as exc:
bb.fatal('%s: %s' % (d.getVar('PF', True), exc)) bb.fatal('%s: %s' % (d.getVar('PF'), exc))
else: else:
if is_included: if is_included:
if reason: if reason:
@ -69,10 +69,10 @@ def copyleft_should_include(d):
else: else:
included, motive = False, 'recipe has excluded licenses: %s' % ', '.join(reason) included, motive = False, 'recipe has excluded licenses: %s' % ', '.join(reason)
if any(fnmatch(d.getVar('PN', True), name) \ if any(fnmatch(d.getVar('PN'), name) \
for name in oe.data.typed_value('COPYLEFT_PN_INCLUDE', d)): for name in oe.data.typed_value('COPYLEFT_PN_INCLUDE', d)):
included, motive = True, 'recipe included by name' included, motive = True, 'recipe included by name'
if any(fnmatch(d.getVar('PN', True), name) \ if any(fnmatch(d.getVar('PN'), name) \
for name in oe.data.typed_value('COPYLEFT_PN_EXCLUDE', d)): for name in oe.data.typed_value('COPYLEFT_PN_EXCLUDE', d)):
included, motive = False, 'recipe excluded by name' included, motive = False, 'recipe excluded by name'
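
The name-based overrides above use shell-style globs via fnmatch and run after the license checks, so an explicit PN match takes precedence. A minimal standalone sketch with assumed pattern lists:

from fnmatch import fnmatch

pn = "linux-yocto"
include_patterns = ["linux-*"]   # stands in for COPYLEFT_PN_INCLUDE
exclude_patterns = ["*-native"]  # stands in for COPYLEFT_PN_EXCLUDE
included, motive = False, "recipe did not match anything"
if any(fnmatch(pn, name) for name in include_patterns):
    included, motive = True, "recipe included by name"
if any(fnmatch(pn, name) for name in exclude_patterns):
    included, motive = False, "recipe excluded by name"
print(included, motive)  # True recipe included by name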

@ -20,25 +20,25 @@ CANADIANEXTRAOS = "${BASECANADIANEXTRAOS}"
CANADIANEXTRAVENDOR = "" CANADIANEXTRAVENDOR = ""
MODIFYTOS ??= "1" MODIFYTOS ??= "1"
python () { python () {
archs = d.getVar('PACKAGE_ARCHS', True).split() archs = d.getVar('PACKAGE_ARCHS').split()
sdkarchs = [] sdkarchs = []
for arch in archs: for arch in archs:
sdkarchs.append(arch + '-${SDKPKGSUFFIX}') sdkarchs.append(arch + '-${SDKPKGSUFFIX}')
d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs)) d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
# Allow the following code segment to be disabled, e.g. meta-environment # Allow the following code segment to be disabled, e.g. meta-environment
if d.getVar("MODIFYTOS", True) != "1": if d.getVar("MODIFYTOS") != "1":
return return
if d.getVar("TCLIBC", True) == "baremetal": if d.getVar("TCLIBC") == "baremetal":
return return
tos = d.getVar("TARGET_OS", True) tos = d.getVar("TARGET_OS")
whitelist = [] whitelist = []
extralibcs = [""] extralibcs = [""]
if "uclibc" in d.getVar("BASECANADIANEXTRAOS", True): if "uclibc" in d.getVar("BASECANADIANEXTRAOS"):
extralibcs.append("uclibc") extralibcs.append("uclibc")
if "musl" in d.getVar("BASECANADIANEXTRAOS", True): if "musl" in d.getVar("BASECANADIANEXTRAOS"):
extralibcs.append("musl") extralibcs.append("musl")
for variant in ["", "spe", "x32", "eabi", "n32"]: for variant in ["", "spe", "x32", "eabi", "n32"]:
for libc in extralibcs: for libc in extralibcs:
@ -51,33 +51,33 @@ python () {
entry = entry + "-" + libc entry = entry + "-" + libc
whitelist.append(entry) whitelist.append(entry)
if tos not in whitelist: if tos not in whitelist:
bb.fatal("Building cross-canadian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS", True)) bb.fatal("Building cross-canadian for an unknown TARGET_SYS (%s), please update cross-canadian.bbclass" % d.getVar("TARGET_SYS"))
for n in ["PROVIDES", "DEPENDS"]: for n in ["PROVIDES", "DEPENDS"]:
d.setVar(n, d.getVar(n, True)) d.setVar(n, d.getVar(n))
d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN", True)) d.setVar("STAGING_BINDIR_TOOLCHAIN", d.getVar("STAGING_BINDIR_TOOLCHAIN"))
for prefix in ["AR", "AS", "DLLTOOL", "CC", "CXX", "GCC", "LD", "LIPO", "NM", "OBJDUMP", "RANLIB", "STRIP", "WINDRES"]: for prefix in ["AR", "AS", "DLLTOOL", "CC", "CXX", "GCC", "LD", "LIPO", "NM", "OBJDUMP", "RANLIB", "STRIP", "WINDRES"]:
n = prefix + "_FOR_TARGET" n = prefix + "_FOR_TARGET"
d.setVar(n, d.getVar(n, True)) d.setVar(n, d.getVar(n))
# This is a bit ugly. We need to zero LIBC/ABI extension which will change TARGET_OS # This is a bit ugly. We need to zero LIBC/ABI extension which will change TARGET_OS
# however we need the old value in some variables. We expand those here first. # however we need the old value in some variables. We expand those here first.
tarch = d.getVar("TARGET_ARCH", True) tarch = d.getVar("TARGET_ARCH")
if tarch == "x86_64": if tarch == "x86_64":
d.setVar("LIBCEXTENSION", "") d.setVar("LIBCEXTENSION", "")
d.setVar("ABIEXTENSION", "") d.setVar("ABIEXTENSION", "")
d.appendVar("CANADIANEXTRAOS", " linux-gnux32") d.appendVar("CANADIANEXTRAOS", " linux-gnux32")
for extraos in d.getVar("BASECANADIANEXTRAOS", True).split(): for extraos in d.getVar("BASECANADIANEXTRAOS").split():
d.appendVar("CANADIANEXTRAOS", " " + extraos + "x32") d.appendVar("CANADIANEXTRAOS", " " + extraos + "x32")
elif tarch == "powerpc": elif tarch == "powerpc":
# PowerPC can build "linux" and "linux-gnuspe" # PowerPC can build "linux" and "linux-gnuspe"
d.setVar("LIBCEXTENSION", "") d.setVar("LIBCEXTENSION", "")
d.setVar("ABIEXTENSION", "") d.setVar("ABIEXTENSION", "")
d.appendVar("CANADIANEXTRAOS", " linux-gnuspe") d.appendVar("CANADIANEXTRAOS", " linux-gnuspe")
for extraos in d.getVar("BASECANADIANEXTRAOS", True).split(): for extraos in d.getVar("BASECANADIANEXTRAOS").split():
d.appendVar("CANADIANEXTRAOS", " " + extraos + "spe") d.appendVar("CANADIANEXTRAOS", " " + extraos + "spe")
elif tarch == "mips64": elif tarch == "mips64":
d.appendVar("CANADIANEXTRAOS", " linux-gnun32") d.appendVar("CANADIANEXTRAOS", " linux-gnun32")
for extraos in d.getVar("BASECANADIANEXTRAOS", True).split(): for extraos in d.getVar("BASECANADIANEXTRAOS").split():
d.appendVar("CANADIANEXTRAOS", " " + extraos + "n32") d.appendVar("CANADIANEXTRAOS", " " + extraos + "n32")
if tarch == "arm" or tarch == "armeb": if tarch == "arm" or tarch == "armeb":
d.appendVar("CANADIANEXTRAOS", " linux-gnueabi linux-musleabi linux-uclibceabi") d.appendVar("CANADIANEXTRAOS", " linux-gnueabi linux-musleabi linux-uclibceabi")
@ -86,10 +86,10 @@ python () {
d.setVar("TARGET_OS", "linux") d.setVar("TARGET_OS", "linux")
# Also need to handle multilib target vendors # Also need to handle multilib target vendors
vendors = d.getVar("CANADIANEXTRAVENDOR", True) vendors = d.getVar("CANADIANEXTRAVENDOR")
if not vendors: if not vendors:
vendors = all_multilib_tune_values(d, 'TARGET_VENDOR') vendors = all_multilib_tune_values(d, 'TARGET_VENDOR')
origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL", True) origvendor = d.getVar("TARGET_VENDOR_MULTILIB_ORIGINAL")
if origvendor: if origvendor:
d.setVar("TARGET_VENDOR", origvendor) d.setVar("TARGET_VENDOR", origvendor)
if origvendor not in vendors.split(): if origvendor not in vendors.split():
@ -116,7 +116,7 @@ HOST_LD_ARCH = "${SDK_LD_ARCH}"
HOST_AS_ARCH = "${SDK_AS_ARCH}" HOST_AS_ARCH = "${SDK_AS_ARCH}"
#assign DPKG_ARCH #assign DPKG_ARCH
DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH', True), '')}" DPKG_ARCH = "${@debian_arch_map(d.getVar('SDK_ARCH'), '')}"
CPPFLAGS = "${BUILDSDK_CPPFLAGS}" CPPFLAGS = "${BUILDSDK_CPPFLAGS}"
CFLAGS = "${BUILDSDK_CFLAGS}" CFLAGS = "${BUILDSDK_CFLAGS}"
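
The x86_64 branch above appends an x32 variant of every entry in BASECANADIANEXTRAOS, and the powerpc and mips64 branches do the same with "spe" and "n32". The expansion in isolation, with an assumed base list:

basecanadianextraos = "linux-uclibc linux-musl"
canadianextraos = ["linux-gnux32"]
for extraos in basecanadianextraos.split():
    canadianextraos.append(extraos + "x32")
print(canadianextraos)  # ['linux-gnux32', 'linux-uclibcx32', 'linux-muslx32']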

@ -5,7 +5,7 @@ MACHINEOVERRIDES = ""
PACKAGE_ARCH = "${SDK_ARCH}" PACKAGE_ARCH = "${SDK_ARCH}"
python () { python () {
# set TUNE_PKGARCH to SDK_ARCH # set TUNE_PKGARCH to SDK_ARCH
d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH', True)) d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH'))
} }
STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}" STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}"

@ -51,7 +51,7 @@ python do_cve_check () {
Check recipe for patched and unpatched CVEs Check recipe for patched and unpatched CVEs
""" """
if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)): if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
patched_cves = get_patches_cves(d) patched_cves = get_patches_cves(d)
patched, unpatched = check_cves(d, patched_cves) patched, unpatched = check_cves(d, patched_cves)
if patched or unpatched: if patched or unpatched:
@ -70,7 +70,7 @@ python cve_check_cleanup () {
Delete the file used to gather all the CVE information. Delete the file used to gather all the CVE information.
""" """
bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE", True)) bb.utils.remove(e.data.getVar("CVE_CHECK_TMP_FILE"))
} }
addhandler cve_check_cleanup addhandler cve_check_cleanup
@ -83,12 +83,12 @@ python cve_check_write_rootfs_manifest () {
import shutil import shutil
if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE", True)): if os.path.exists(d.getVar("CVE_CHECK_TMP_FILE")):
bb.note("Writing rootfs CVE manifest") bb.note("Writing rootfs CVE manifest")
deploy_dir = d.getVar("DEPLOY_DIR_IMAGE", True) deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
link_name = d.getVar("IMAGE_LINK_NAME", True) link_name = d.getVar("IMAGE_LINK_NAME")
manifest_name = d.getVar("CVE_CHECK_MANIFEST", True) manifest_name = d.getVar("CVE_CHECK_MANIFEST")
cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE", True) cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
shutil.copyfile(cve_tmp_file, manifest_name) shutil.copyfile(cve_tmp_file, manifest_name)
@ -101,7 +101,7 @@ python cve_check_write_rootfs_manifest () {
bb.plain("Image CVE report stored in: %s" % manifest_name) bb.plain("Image CVE report stored in: %s" % manifest_name)
} }
ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST', True) == '1' else ''}" ROOTFS_POSTPROCESS_COMMAND_prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
def get_patches_cves(d): def get_patches_cves(d):
""" """
@ -110,7 +110,7 @@ def get_patches_cves(d):
import re import re
pn = d.getVar("PN", True) pn = d.getVar("PN")
cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+") cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
patched_cves = set() patched_cves = set()
bb.debug(2, "Looking for patches that solve CVEs for %s" % pn) bb.debug(2, "Looking for patches that solve CVEs for %s" % pn)
@ -149,15 +149,15 @@ def check_cves(d, patched_cves):
cves_patched = [] cves_patched = []
cves_unpatched = [] cves_unpatched = []
bpn = d.getVar("CVE_PRODUCT") bpn = d.getVar("CVE_PRODUCT")
pv = d.getVar("PV", True).split("git+")[0] pv = d.getVar("PV").split("git+")[0]
cves = " ".join(patched_cves) cves = " ".join(patched_cves)
cve_db_dir = d.getVar("CVE_CHECK_DB_DIR", True) cve_db_dir = d.getVar("CVE_CHECK_DB_DIR")
cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST", True)) cve_whitelist = ast.literal_eval(d.getVar("CVE_CHECK_CVE_WHITELIST"))
cve_cmd = "cve-check-tool" cve_cmd = "cve-check-tool"
cmd = [cve_cmd, "--no-html", "--csv", "--not-affected", "-t", "faux", "-d", cve_db_dir] cmd = [cve_cmd, "--no-html", "--csv", "--not-affected", "-t", "faux", "-d", cve_db_dir]
# If the recipe has been whitelisted we return empty lists # If the recipe has been whitelisted we return empty lists
if d.getVar("PN", True) in d.getVar("CVE_CHECK_PN_WHITELIST", True).split(): if d.getVar("PN") in d.getVar("CVE_CHECK_PN_WHITELIST").split():
bb.note("Recipe has been whitelisted, skipping check") bb.note("Recipe has been whitelisted, skipping check")
return ([], []) return ([], [])
@ -210,7 +210,7 @@ def get_cve_info(d, cves):
from pysqlite2 import dbapi2 as sqlite3 from pysqlite2 import dbapi2 as sqlite3
cve_data = {} cve_data = {}
db_file = d.getVar("CVE_CHECK_DB_FILE", True) db_file = d.getVar("CVE_CHECK_DB_FILE")
placeholder = ",".join("?" * len(cves)) placeholder = ",".join("?" * len(cves))
query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholder query = "SELECT * FROM NVD WHERE id IN (%s)" % placeholder
conn = sqlite3.connect(db_file) conn = sqlite3.connect(db_file)
@ -231,15 +231,15 @@ def cve_write_data(d, patched, unpatched, cve_data):
CVE manifest if enabled. CVE manifest if enabled.
""" """
cve_file = d.getVar("CVE_CHECK_LOCAL_FILE", True) cve_file = d.getVar("CVE_CHECK_LOCAL_FILE")
nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId=" nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId="
write_string = "" write_string = ""
first_alert = True first_alert = True
bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR", True)) bb.utils.mkdirhier(d.getVar("CVE_CHECK_LOCAL_DIR"))
for cve in sorted(cve_data): for cve in sorted(cve_data):
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN", True) write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV", True) write_string += "PACKAGE VERSION: %s\n" % d.getVar("PV")
write_string += "CVE: %s\n" % cve write_string += "CVE: %s\n" % cve
if cve in patched: if cve in patched:
write_string += "CVE STATUS: Patched\n" write_string += "CVE STATUS: Patched\n"
@ -257,13 +257,13 @@ def cve_write_data(d, patched, unpatched, cve_data):
bb.note("Writing file %s with CVE information" % cve_file) bb.note("Writing file %s with CVE information" % cve_file)
f.write(write_string) f.write(write_string)
if d.getVar("CVE_CHECK_COPY_FILES", True) == "1": if d.getVar("CVE_CHECK_COPY_FILES") == "1":
cve_dir = d.getVar("CVE_CHECK_DIR", True) cve_dir = d.getVar("CVE_CHECK_DIR")
bb.utils.mkdirhier(cve_dir) bb.utils.mkdirhier(cve_dir)
deploy_file = os.path.join(cve_dir, d.getVar("PN", True)) deploy_file = os.path.join(cve_dir, d.getVar("PN"))
with open(deploy_file, "w") as f: with open(deploy_file, "w") as f:
f.write(write_string) f.write(write_string)
if d.getVar("CVE_CHECK_CREATE_MANIFEST", True) == "1": if d.getVar("CVE_CHECK_CREATE_MANIFEST") == "1":
with open(d.getVar("CVE_CHECK_TMP_FILE", True), "a") as f: with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
f.write("%s" % write_string) f.write("%s" % write_string)
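
CVE_CHECK_CVE_WHITELIST above is stored as a Python literal and parsed with ast.literal_eval(), which evaluates literals without executing arbitrary code. A sketch with an assumed dict layout (CVE id mapped to whitelisted versions):

import ast

whitelist_str = "{'CVE-2014-2524': ['6.3']}"
cve_whitelist = ast.literal_eval(whitelist_str)
if "CVE-2014-2524" in cve_whitelist:
    print("whitelisted for versions:", cve_whitelist["CVE-2014-2524"])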

@ -20,17 +20,17 @@ do_package_write_tar[rdeptask] = "${DEBIANRDEP}"
do_package_write_rpm[rdeptask] = "${DEBIANRDEP}" do_package_write_rpm[rdeptask] = "${DEBIANRDEP}"
python () { python () {
if not d.getVar("PACKAGES", True): if not d.getVar("PACKAGES"):
d.setVar("DEBIANRDEP", "") d.setVar("DEBIANRDEP", "")
} }
python debian_package_name_hook () { python debian_package_name_hook () {
import glob, copy, stat, errno, re import glob, copy, stat, errno, re
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$") bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir")) + "$")
lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$") lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir")) + "$")
so_re = re.compile("lib.*\.so") so_re = re.compile("lib.*\.so")
def socrunch(s): def socrunch(s):
@ -53,11 +53,11 @@ python debian_package_name_hook () {
return (s[stat.ST_MODE] & stat.S_IEXEC) return (s[stat.ST_MODE] & stat.S_IEXEC)
def add_rprovides(pkg, d): def add_rprovides(pkg, d):
newpkg = d.getVar('PKG_' + pkg, True) newpkg = d.getVar('PKG_' + pkg)
if newpkg and newpkg != pkg: if newpkg and newpkg != pkg:
provs = (d.getVar('RPROVIDES_' + pkg, True) or "").split() provs = (d.getVar('RPROVIDES_' + pkg) or "").split()
if pkg not in provs: if pkg not in provs:
d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV", True) + ")") d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")")
def auto_libname(packages, orig_pkg): def auto_libname(packages, orig_pkg):
sonames = [] sonames = []
@ -70,7 +70,7 @@ python debian_package_name_hook () {
if lib_re.match(root): if lib_re.match(root):
has_libs = 1 has_libs = 1
if so_re.match(os.path.basename(file)): if so_re.match(os.path.basename(file)):
cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + file + " 2>/dev/null" cmd = (d.getVar('TARGET_PREFIX') or "") + "objdump -p " + file + " 2>/dev/null"
fd = os.popen(cmd) fd = os.popen(cmd)
lines = fd.readlines() lines = fd.readlines()
fd.close() fd.close()
@ -84,7 +84,7 @@ python debian_package_name_hook () {
if len(sonames) == 1: if len(sonames) == 1:
soname = sonames[0] soname = sonames[0]
elif len(sonames) > 1: elif len(sonames) > 1:
lead = d.getVar('LEAD_SONAME', True) lead = d.getVar('LEAD_SONAME')
if lead: if lead:
r = re.compile(lead) r = re.compile(lead)
filtered = [] filtered = []
@ -115,7 +115,7 @@ python debian_package_name_hook () {
newpkg = pkgname newpkg = pkgname
else: else:
newpkg = pkg.replace(orig_pkg, devname, 1) newpkg = pkg.replace(orig_pkg, devname, 1)
mlpre=d.getVar('MLPREFIX', True) mlpre=d.getVar('MLPREFIX')
if mlpre: if mlpre:
if not newpkg.find(mlpre) == 0: if not newpkg.find(mlpre) == 0:
newpkg = mlpre + newpkg newpkg = mlpre + newpkg
@ -131,7 +131,7 @@ python debian_package_name_hook () {
# and later # and later
# DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
# so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True): for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS') or "").split(), reverse=True):
auto_libname(packages, pkg) auto_libname(packages, pkg)
} }
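
add_rprovides() above preserves compatibility when a package is renamed to its Debian-style library name: the original name is kept as a versioned runtime provide. The same logic sketched without the datastore, with illustrative values:

pkg = "ncurses-libtic"  # original package name
newpkg = "libtic5"      # Debian-style name derived from the soname
pkgv = "6.0"
rprovides = []
if newpkg and newpkg != pkg and pkg not in rprovides:
    rprovides.append("%s (=%s)" % (pkg, pkgv))
print(rprovides)  # ['ncurses-libtic (=6.0)']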

@ -5,14 +5,14 @@ DEVSHELL = "${SHELL}"
python do_devshell () { python do_devshell () {
if d.getVarFlag("do_devshell", "manualfakeroot", True): if d.getVarFlag("do_devshell", "manualfakeroot", True):
d.prependVar("DEVSHELL", "pseudo ") d.prependVar("DEVSHELL", "pseudo ")
fakeenv = d.getVar("FAKEROOTENV", True).split() fakeenv = d.getVar("FAKEROOTENV").split()
for f in fakeenv: for f in fakeenv:
k = f.split("=") k = f.split("=")
d.setVar(k[0], k[1]) d.setVar(k[0], k[1])
d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0]) d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0])
d.delVarFlag("do_devshell", "fakeroot") d.delVarFlag("do_devshell", "fakeroot")
oe_terminal(d.getVar('DEVSHELL', True), 'OpenEmbedded Developer Shell', d) oe_terminal(d.getVar('DEVSHELL'), 'OpenEmbedded Developer Shell', d)
} }
addtask devshell after do_patch addtask devshell after do_patch
@ -82,7 +82,7 @@ def devpyshell(d):
more = False more = False
i = code.InteractiveInterpreter(locals=_context) i = code.InteractiveInterpreter(locals=_context)
print("OE PyShell (PN = %s)\n" % d.getVar("PN", True)) print("OE PyShell (PN = %s)\n" % d.getVar("PN"))
def prompt(more): def prompt(more):
if more: if more:
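
The manual-fakeroot branch above splits FAKEROOTENV into KEY=VALUE pairs and exports each into the devshell environment. The parsing step alone, with an illustrative value:

fakeenv = "PSEUDO_PREFIX=/usr PSEUDO_NOSYMLINKEXP=1".split()
exports = {}
for f in fakeenv:
    k = f.split("=")
    exports[k[0]] = k[1]
print(exports)  # {'PSEUDO_PREFIX': '/usr', 'PSEUDO_NOSYMLINKEXP': '1'}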

@ -11,15 +11,15 @@
python () { python () {
# Assume at least one var is set. # Assume at least one var is set.
distro_features = (d.getVar('DISTRO_FEATURES', True) or "").split() distro_features = (d.getVar('DISTRO_FEATURES') or "").split()
any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES', True) any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES')
if any_of_distro_features: if any_of_distro_features:
any_of_distro_features = any_of_distro_features.split() any_of_distro_features = any_of_distro_features.split()
if set.isdisjoint(set(any_of_distro_features),set(distro_features)): if set.isdisjoint(set(any_of_distro_features),set(distro_features)):
raise bb.parse.SkipPackage("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features) raise bb.parse.SkipPackage("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features)
required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES', True) required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES')
if required_distro_features: if required_distro_features:
required_distro_features = required_distro_features.split() required_distro_features = required_distro_features.split()
for f in required_distro_features: for f in required_distro_features:
@ -28,7 +28,7 @@ python () {
else: else:
raise bb.parse.SkipPackage("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f) raise bb.parse.SkipPackage("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f)
conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES', True) conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES')
if conflict_distro_features: if conflict_distro_features:
conflict_distro_features = conflict_distro_features.split() conflict_distro_features = conflict_distro_features.split()
for f in conflict_distro_features: for f in conflict_distro_features:
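
The three feature checks above reduce to simple set operations; the ANY_OF case in particular is a plain disjointness test. Standalone, with assumed feature lists:

distro_features = "systemd pam x11".split()
any_of = "x11 wayland".split()  # stands in for ANY_OF_DISTRO_FEATURES
if set(any_of).isdisjoint(distro_features):
    raise RuntimeError("one of %s needs to be in DISTRO_FEATURES" % any_of)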

@ -25,75 +25,75 @@ addtask distrodata_np
do_distrodata_np[nostamp] = "1" do_distrodata_np[nostamp] = "1"
python do_distrodata_np() { python do_distrodata_np() {
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
pn = d.getVar("PN", True) pn = d.getVar("PN")
bb.note("Package Name: %s" % pn) bb.note("Package Name: %s" % pn)
import oe.distro_check as dist_check import oe.distro_check as dist_check
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
distro_check_dir = os.path.join(tmpdir, "distro_check") distro_check_dir = os.path.join(tmpdir, "distro_check")
datetime = localdata.getVar('DATETIME', True) datetime = localdata.getVar('DATETIME')
dist_check.update_distro_data(distro_check_dir, datetime, localdata) dist_check.update_distro_data(distro_check_dir, datetime, localdata)
if pn.find("-native") != -1: if pn.find("-native") != -1:
pnstripped = pn.split("-native") pnstripped = pn.split("-native")
bb.note("Native Split: %s" % pnstripped) bb.note("Native Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-cross") != -1: if pn.find("-cross") != -1:
pnstripped = pn.split("-cross") pnstripped = pn.split("-cross")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-crosssdk") != -1: if pn.find("-crosssdk") != -1:
pnstripped = pn.split("-crosssdk") pnstripped = pn.split("-crosssdk")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.startswith("nativesdk-"): if pn.startswith("nativesdk-"):
pnstripped = pn.replace("nativesdk-", "") pnstripped = pn.replace("nativesdk-", "")
bb.note("NativeSDK Split: %s" % pnstripped) bb.note("NativeSDK Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-initial") != -1: if pn.find("-initial") != -1:
pnstripped = pn.split("-initial") pnstripped = pn.split("-initial")
bb.note("initial Split: %s" % pnstripped) bb.note("initial Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
"""generate package information from .bb file""" """generate package information from .bb file"""
pname = localdata.getVar('PN', True) pname = localdata.getVar('PN')
pcurver = localdata.getVar('PV', True) pcurver = localdata.getVar('PV')
pdesc = localdata.getVar('DESCRIPTION', True) pdesc = localdata.getVar('DESCRIPTION')
if pdesc is not None: if pdesc is not None:
pdesc = pdesc.replace(',','') pdesc = pdesc.replace(',','')
pdesc = pdesc.replace('\n','') pdesc = pdesc.replace('\n','')
pgrp = localdata.getVar('SECTION', True) pgrp = localdata.getVar('SECTION')
plicense = localdata.getVar('LICENSE', True).replace(',','_') plicense = localdata.getVar('LICENSE').replace(',','_')
rstatus = localdata.getVar('RECIPE_COLOR', True) rstatus = localdata.getVar('RECIPE_COLOR')
if rstatus is not None: if rstatus is not None:
rstatus = rstatus.replace(',','') rstatus = rstatus.replace(',','')
pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True) pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
if pcurver == pupver: if pcurver == pupver:
vermatch="1" vermatch="1"
else: else:
vermatch="0" vermatch="0"
noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
if noupdate_reason is None: if noupdate_reason is None:
noupdate="0" noupdate="0"
else: else:
noupdate="1" noupdate="1"
noupdate_reason = noupdate_reason.replace(',','') noupdate_reason = noupdate_reason.replace(',','')
maintainer = localdata.getVar('RECIPE_MAINTAINER', True) maintainer = localdata.getVar('RECIPE_MAINTAINER')
rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True) rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \ bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \
@ -109,80 +109,80 @@ addtask distrodata
do_distrodata[nostamp] = "1" do_distrodata[nostamp] = "1"
python do_distrodata() { python do_distrodata() {
import csv import csv
logpath = d.getVar('LOG_DIR', True) logpath = d.getVar('LOG_DIR')
bb.utils.mkdirhier(logpath) bb.utils.mkdirhier(logpath)
logfile = os.path.join(logpath, "distrodata.csv") logfile = os.path.join(logpath, "distrodata.csv")
import oe.distro_check as dist_check import oe.distro_check as dist_check
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
distro_check_dir = os.path.join(tmpdir, "distro_check") distro_check_dir = os.path.join(tmpdir, "distro_check")
datetime = localdata.getVar('DATETIME', True) datetime = localdata.getVar('DATETIME')
dist_check.update_distro_data(distro_check_dir, datetime, localdata) dist_check.update_distro_data(distro_check_dir, datetime, localdata)
pn = d.getVar("PN", True) pn = d.getVar("PN")
bb.note("Package Name: %s" % pn) bb.note("Package Name: %s" % pn)
if pn.find("-native") != -1: if pn.find("-native") != -1:
pnstripped = pn.split("-native") pnstripped = pn.split("-native")
bb.note("Native Split: %s" % pnstripped) bb.note("Native Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.startswith("nativesdk-"): if pn.startswith("nativesdk-"):
pnstripped = pn.replace("nativesdk-", "") pnstripped = pn.replace("nativesdk-", "")
bb.note("NativeSDK Split: %s" % pnstripped) bb.note("NativeSDK Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-cross") != -1: if pn.find("-cross") != -1:
pnstripped = pn.split("-cross") pnstripped = pn.split("-cross")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-crosssdk") != -1: if pn.find("-crosssdk") != -1:
pnstripped = pn.split("-crosssdk") pnstripped = pn.split("-crosssdk")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-initial") != -1: if pn.find("-initial") != -1:
pnstripped = pn.split("-initial") pnstripped = pn.split("-initial")
bb.note("initial Split: %s" % pnstripped) bb.note("initial Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
"""generate package information from .bb file""" """generate package information from .bb file"""
pname = localdata.getVar('PN', True) pname = localdata.getVar('PN')
pcurver = localdata.getVar('PV', True) pcurver = localdata.getVar('PV')
pdesc = localdata.getVar('DESCRIPTION', True) pdesc = localdata.getVar('DESCRIPTION')
if pdesc is not None: if pdesc is not None:
pdesc = pdesc.replace(',','') pdesc = pdesc.replace(',','')
pdesc = pdesc.replace('\n','') pdesc = pdesc.replace('\n','')
pgrp = localdata.getVar('SECTION', True) pgrp = localdata.getVar('SECTION')
plicense = localdata.getVar('LICENSE', True).replace(',','_') plicense = localdata.getVar('LICENSE').replace(',','_')
rstatus = localdata.getVar('RECIPE_COLOR', True) rstatus = localdata.getVar('RECIPE_COLOR')
if rstatus is not None: if rstatus is not None:
rstatus = rstatus.replace(',','') rstatus = rstatus.replace(',','')
pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True) pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION')
if pcurver == pupver: if pcurver == pupver:
vermatch="1" vermatch="1"
else: else:
vermatch="0" vermatch="0"
noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON')
if noupdate_reason is None: if noupdate_reason is None:
noupdate="0" noupdate="0"
else: else:
noupdate="1" noupdate="1"
noupdate_reason = noupdate_reason.replace(',','') noupdate_reason = noupdate_reason.replace(',','')
maintainer = localdata.getVar('RECIPE_MAINTAINER', True) maintainer = localdata.getVar('RECIPE_MAINTAINER')
rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True) rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE')
# do the comparison # do the comparison
result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata)
@ -272,60 +272,60 @@ python do_checkpkg() {
from bb.fetch2 import FetchError, NoMethodError, decodeurl from bb.fetch2 import FetchError, NoMethodError, decodeurl
"""first check whether a uri is provided""" """first check whether a uri is provided"""
src_uri = (d.getVar('SRC_URI', True) or '').split() src_uri = (d.getVar('SRC_URI') or '').split()
if src_uri: if src_uri:
uri_type, _, _, _, _, _ = decodeurl(src_uri[0]) uri_type, _, _, _, _, _ = decodeurl(src_uri[0])
else: else:
uri_type = "none" uri_type = "none"
"""initialize log files.""" """initialize log files."""
logpath = d.getVar('LOG_DIR', True) logpath = d.getVar('LOG_DIR')
bb.utils.mkdirhier(logpath) bb.utils.mkdirhier(logpath)
logfile = os.path.join(logpath, "checkpkg.csv") logfile = os.path.join(logpath, "checkpkg.csv")
"""generate package information from .bb file""" """generate package information from .bb file"""
pname = d.getVar('PN', True) pname = d.getVar('PN')
if pname.find("-native") != -1: if pname.find("-native") != -1:
if d.getVar('BBCLASSEXTEND', True): if d.getVar('BBCLASSEXTEND'):
return return
pnstripped = pname.split("-native") pnstripped = pname.split("-native")
bb.note("Native Split: %s" % pnstripped) bb.note("Native Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pname.startswith("nativesdk-"): if pname.startswith("nativesdk-"):
if d.getVar('BBCLASSEXTEND', True): if d.getVar('BBCLASSEXTEND'):
return return
pnstripped = pname.replace("nativesdk-", "") pnstripped = pname.replace("nativesdk-", "")
bb.note("NativeSDK Split: %s" % pnstripped) bb.note("NativeSDK Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pname.find("-cross") != -1: if pname.find("-cross") != -1:
pnstripped = pname.split("-cross") pnstripped = pname.split("-cross")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pname.find("-initial") != -1: if pname.find("-initial") != -1:
pnstripped = pname.split("-initial") pnstripped = pname.split("-initial")
bb.note("initial Split: %s" % pnstripped) bb.note("initial Split: %s" % pnstripped)
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES'))
bb.data.update_data(localdata) bb.data.update_data(localdata)
pdesc = localdata.getVar('DESCRIPTION', True) pdesc = localdata.getVar('DESCRIPTION')
pgrp = localdata.getVar('SECTION', True) pgrp = localdata.getVar('SECTION')
pversion = localdata.getVar('PV', True) pversion = localdata.getVar('PV')
plicense = localdata.getVar('LICENSE', True) plicense = localdata.getVar('LICENSE')
psection = localdata.getVar('SECTION', True) psection = localdata.getVar('SECTION')
phome = localdata.getVar('HOMEPAGE', True) phome = localdata.getVar('HOMEPAGE')
prelease = localdata.getVar('PR', True) prelease = localdata.getVar('PR')
pdepends = localdata.getVar('DEPENDS', True) pdepends = localdata.getVar('DEPENDS')
pbugtracker = localdata.getVar('BUGTRACKER', True) pbugtracker = localdata.getVar('BUGTRACKER')
ppe = localdata.getVar('PE', True) ppe = localdata.getVar('PE')
psrcuri = localdata.getVar('SRC_URI', True) psrcuri = localdata.getVar('SRC_URI')
maintainer = localdata.getVar('RECIPE_MAINTAINER', True) maintainer = localdata.getVar('RECIPE_MAINTAINER')
""" Get upstream version version """ """ Get upstream version version """
pupver = "" pupver = ""
@ -362,7 +362,7 @@ python do_checkpkg() {
psrcuri = "none" psrcuri = "none"
pdepends = "".join(pdepends.split("\t")) pdepends = "".join(pdepends.split("\t"))
pdesc = "".join(pdesc.split("\t")) pdesc = "".join(pdesc.split("\t"))
no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True) no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON')
lf = bb.utils.lockfile("%s.lock" % logfile) lf = bb.utils.lockfile("%s.lock" % logfile)
with open(logfile, "a") as f: with open(logfile, "a") as f:
writer = csv.writer(f, delimiter='\t') writer = csv.writer(f, delimiter='\t')
@ -401,12 +401,12 @@ python do_distro_check() {
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
bb.data.update_data(localdata) bb.data.update_data(localdata)
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
distro_check_dir = os.path.join(tmpdir, "distro_check") distro_check_dir = os.path.join(tmpdir, "distro_check")
logpath = d.getVar('LOG_DIR', True) logpath = d.getVar('LOG_DIR')
bb.utils.mkdirhier(logpath) bb.utils.mkdirhier(logpath)
result_file = os.path.join(logpath, "distrocheck.csv") result_file = os.path.join(logpath, "distrocheck.csv")
datetime = localdata.getVar('DATETIME', True) datetime = localdata.getVar('DATETIME')
dc.update_distro_data(distro_check_dir, datetime, localdata) dc.update_distro_data(distro_check_dir, datetime, localdata)
# do the comparison # do the comparison
@ -449,12 +449,12 @@ do_checklicense[nostamp] = "1"
python do_checklicense() { python do_checklicense() {
import csv import csv
import shutil import shutil
logpath = d.getVar('LOG_DIR', True) logpath = d.getVar('LOG_DIR')
bb.utils.mkdirhier(logpath) bb.utils.mkdirhier(logpath)
pn = d.getVar('PN', True) pn = d.getVar('PN')
logfile = os.path.join(logpath, "missinglicense.csv") logfile = os.path.join(logpath, "missinglicense.csv")
generic_directory = d.getVar('COMMON_LICENSE_DIR', True) generic_directory = d.getVar('COMMON_LICENSE_DIR')
license_types = d.getVar('LICENSE', True) license_types = d.getVar('LICENSE')
for license_type in ((license_types.replace('+', '').replace('|', '&') for license_type in ((license_types.replace('+', '').replace('|', '&')
.replace('(', '').replace(')', '').replace(';', '') .replace('(', '').replace(')', '').replace(';', '')
.replace(',', '').replace(" ", "").split("&"))): .replace(',', '').replace(" ", "").split("&"))):
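
The replace chain above flattens a compound LICENSE expression into bare license names by stripping operators and grouping characters, then splitting on '&'. Traced with a sample value:

license_types = "GPLv2+ | (MIT & BSD-3-Clause)"
names = (license_types.replace('+', '').replace('|', '&')
         .replace('(', '').replace(')', '').replace(';', '')
         .replace(',', '').replace(' ', '').split('&'))
print(names)  # ['GPLv2', 'MIT', 'BSD-3-Clause']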

@ -1,4 +1,4 @@
DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}" DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"
inherit distutils-common-base pythonnative inherit distutils-common-base pythonnative

@ -1,4 +1,4 @@
DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}" DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}"
RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}"
inherit distutils-common-base python3native inherit distutils-common-base python3native

@ -28,34 +28,34 @@ SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}" EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"
python () { python () {
externalsrc = d.getVar('EXTERNALSRC', True) externalsrc = d.getVar('EXTERNALSRC')
# If this is the base recipe and EXTERNALSRC is set for it or any of its # If this is the base recipe and EXTERNALSRC is set for it or any of its
# derivatives, then enable BB_DONT_CACHE to force the recipe to always be # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
# re-parsed so that the file-checksums function for do_compile is run every # re-parsed so that the file-checksums function for do_compile is run every
# time. # time.
bpn = d.getVar('BPN', True) bpn = d.getVar('BPN')
if bpn == d.getVar('PN', True): if bpn == d.getVar('PN'):
classextend = (d.getVar('BBCLASSEXTEND', True) or '').split() classextend = (d.getVar('BBCLASSEXTEND') or '').split()
if (externalsrc or if (externalsrc or
('native' in classextend and ('native' in classextend and
d.getVar('EXTERNALSRC_pn-%s-native' % bpn, True)) or d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or
('nativesdk' in classextend and ('nativesdk' in classextend and
d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn, True)) or d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or
('cross' in classextend and ('cross' in classextend and
d.getVar('EXTERNALSRC_pn-%s-cross' % bpn, True))): d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))):
d.setVar('BB_DONT_CACHE', '1') d.setVar('BB_DONT_CACHE', '1')
if externalsrc: if externalsrc:
d.setVar('S', externalsrc) d.setVar('S', externalsrc)
externalsrcbuild = d.getVar('EXTERNALSRC_BUILD', True) externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')
if externalsrcbuild: if externalsrcbuild:
d.setVar('B', externalsrcbuild) d.setVar('B', externalsrcbuild)
else: else:
d.setVar('B', '${WORKDIR}/${BPN}-${PV}/') d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')
local_srcuri = [] local_srcuri = []
fetch = bb.fetch2.Fetch((d.getVar('SRC_URI', True) or '').split(), d) fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
for url in fetch.urls: for url in fetch.urls:
url_data = fetch.ud[url] url_data = fetch.ud[url]
parm = url_data.parm parm = url_data.parm
@ -94,7 +94,7 @@ python () {
# Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack']) d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])
for task in d.getVar("SRCTREECOVEREDTASKS", True).split(): for task in d.getVar("SRCTREECOVEREDTASKS").split():
if local_srcuri and task in fetch_tasks: if local_srcuri and task in fetch_tasks:
continue continue
bb.build.deltask(task, d) bb.build.deltask(task, d)
@ -106,13 +106,13 @@ python () {
d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}') d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')
# We don't want the workdir to go away # We don't want the workdir to go away
d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN', True)) d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
# If B=S the same builddir is used even for different architectures. # If B=S the same builddir is used even for different architectures.
# Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
# change of do_configure task hash is correctly detected and stamps are # change of do_configure task hash is correctly detected and stamps are
# invalidated if e.g. MACHINE changes. # invalidated if e.g. MACHINE changes.
if d.getVar('S', True) == d.getVar('B', True): if d.getVar('S') == d.getVar('B'):
configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate' configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
d.setVar('CONFIGURESTAMPFILE', configstamp) d.setVar('CONFIGURESTAMPFILE', configstamp)
d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}') d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
@ -120,10 +120,10 @@ python () {
python externalsrc_configure_prefunc() { python externalsrc_configure_prefunc() {
# Create desired symlinks # Create desired symlinks
symlinks = (d.getVar('EXTERNALSRC_SYMLINKS', True) or '').split() symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
for symlink in symlinks: for symlink in symlinks:
symsplit = symlink.split(':', 1) symsplit = symlink.split(':', 1)
lnkfile = os.path.join(d.getVar('S', True), symsplit[0]) lnkfile = os.path.join(d.getVar('S'), symsplit[0])
target = d.expand(symsplit[1]) target = d.expand(symsplit[1])
if len(symsplit) > 1: if len(symsplit) > 1:
if os.path.islink(lnkfile): if os.path.islink(lnkfile):
@ -139,7 +139,7 @@ python externalsrc_configure_prefunc() {
python externalsrc_compile_prefunc() { python externalsrc_compile_prefunc() {
# Make it obvious that this is happening, since forgetting about it could lead to much confusion # Make it obvious that this is happening, since forgetting about it could lead to much confusion
bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN', True), d.getVar('EXTERNALSRC', True))) bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC')))
} }
def srctree_hash_files(d): def srctree_hash_files(d):
@ -147,7 +147,7 @@ def srctree_hash_files(d):
import subprocess import subprocess
import tempfile import tempfile
s_dir = d.getVar('EXTERNALSRC', True) s_dir = d.getVar('EXTERNALSRC')
git_dir = os.path.join(s_dir, '.git') git_dir = os.path.join(s_dir, '.git')
oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1') oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1')
@ -165,7 +165,7 @@ def srctree_hash_files(d):
fobj.write(sha1) fobj.write(sha1)
ret = oe_hash_file + ':True' ret = oe_hash_file + ':True'
else: else:
ret = d.getVar('EXTERNALSRC', True) + '/*:True' ret = d.getVar('EXTERNALSRC') + '/*:True'
return ret return ret
def srctree_configure_hash_files(d): def srctree_configure_hash_files(d):
@ -173,7 +173,7 @@ def srctree_configure_hash_files(d):
Get the list of files that should trigger do_configure to re-execute, Get the list of files that should trigger do_configure to re-execute,
based on the value of CONFIGURE_FILES based on the value of CONFIGURE_FILES
""" """
in_files = (d.getVar('CONFIGURE_FILES', True) or '').split() in_files = (d.getVar('CONFIGURE_FILES') or '').split()
out_items = [] out_items = []
search_files = [] search_files = []
for entry in in_files: for entry in in_files:
@ -182,7 +182,7 @@ def srctree_configure_hash_files(d):
else: else:
search_files.append(entry) search_files.append(entry)
if search_files: if search_files:
s_dir = d.getVar('EXTERNALSRC', True) s_dir = d.getVar('EXTERNALSRC')
for root, _, files in os.walk(s_dir): for root, _, files in os.walk(s_dir):
for f in files: for f in files:
if f in search_files: if f in search_files:
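
srctree_configure_hash_files() builds the file-checksums list that decides when do_configure re-runs: absolute CONFIGURE_FILES entries are used directly, while bare names are located by walking the external source tree. A sketch of the split, assuming the 'path:exists' format seen in srctree_hash_files() above:

import os

in_files = ["configure.ac", "/etc/site.conf"]  # stands in for CONFIGURE_FILES
out_items, search_files = [], []
for entry in in_files:
    if entry.startswith("/"):
        out_items.append("%s:%s" % (entry, os.path.exists(entry)))
    else:
        search_files.append(entry)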

@ -15,7 +15,7 @@
inherit useradd_base inherit useradd_base
PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS', True))]}" PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}"
# Image level user / group settings # Image level user / group settings
ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;" ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;"

@@ -30,26 +30,26 @@ fi
 }
 
 python () {
-    font_pkgs = d.getVar('FONT_PACKAGES', True).split()
-    deps = d.getVar("FONT_EXTRA_RDEPENDS", True)
+    font_pkgs = d.getVar('FONT_PACKAGES').split()
+    deps = d.getVar("FONT_EXTRA_RDEPENDS")
 
     for pkg in font_pkgs:
         if deps: d.appendVar('RDEPENDS_' + pkg, ' '+deps)
 }
 
 python add_fontcache_postinsts() {
-    for pkg in d.getVar('FONT_PACKAGES', True).split():
+    for pkg in d.getVar('FONT_PACKAGES').split():
         bb.note("adding fonts postinst and postrm scripts to %s" % pkg)
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('fontcache_common', True)
+        postinst += d.getVar('fontcache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('fontcache_common', True)
+        postrm += d.getVar('fontcache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

@@ -3,7 +3,7 @@
 # on ext file systems and depends on tune2fs.
 def get_rootfs_uuid(d):
     import subprocess
-    rootfs = d.getVar('ROOTFS', True)
+    rootfs = d.getVar('ROOTFS')
     output = subprocess.check_output(['tune2fs', '-l', rootfs])
     for line in output.split('\n'):
         if line.startswith('Filesystem UUID:'):

@@ -42,8 +42,8 @@ done
 python populate_packages_append () {
     import re
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')
 
     for pkg in packages:
         schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
@@ -56,15 +56,15 @@ python populate_packages_append () {
         if schemas != []:
             bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
             d.setVar('SCHEMA_FILES', " ".join(schemas))
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 postinst = '#!/bin/sh\n'
-            postinst += d.getVar('gconf_postinst', True)
+            postinst += d.getVar('gconf_postinst')
             d.setVar('pkg_postinst_%s' % pkg, postinst)
-            prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+            prerm = d.getVar('pkg_prerm_%s' % pkg)
             if not prerm:
                 prerm = '#!/bin/sh\n'
-            prerm += d.getVar('gconf_prerm', True)
+            prerm += d.getVar('gconf_prerm')
             d.setVar('pkg_prerm_%s' % pkg, prerm)
             d.appendVar("RDEPENDS_%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf')
 }

@@ -1,15 +1,15 @@
 def gettext_dependencies(d):
-    if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'):
+    if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
         return ""
-    if d.getVar('USE_NLS', True) == 'no':
+    if d.getVar('USE_NLS') == 'no':
         return "gettext-minimal-native"
     return d.getVar('DEPENDS_GETTEXT', False)
 
 def gettext_oeconf(d):
-    if d.getVar('USE_NLS', True) == 'no':
+    if d.getVar('USE_NLS') == 'no':
         return '--disable-nls'
     # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set
-    if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'):
+    if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'):
         return '--disable-nls'
     return "--enable-nls"

@@ -17,21 +17,21 @@ fi
 }
 
 python populate_packages_append () {
-    packages = d.getVar('GIO_MODULE_PACKAGES', True).split()
+    packages = d.getVar('GIO_MODULE_PACKAGES').split()
 
     for pkg in packages:
         bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg)
 
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gio_module_cache_common', True)
+        postinst += d.getVar('gio_module_cache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gio_module_cache_common', True)
+        postrm += d.getVar('gio_module_cache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

@@ -72,14 +72,14 @@ efi_hddimg_populate() {
 python build_efi_cfg() {
     import sys
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    gfxserial = d.getVar('GRUB_GFXSERIAL', True) or ""
+    gfxserial = d.getVar('GRUB_GFXSERIAL') or ""
 
-    labels = d.getVar('LABELS', True)
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -88,7 +88,7 @@ python build_efi_cfg() {
         bb.debug(1, "No labels, nothing to do")
         return
 
-    cfile = d.getVar('GRUB_CFG', True)
+    cfile = d.getVar('GRUB_CFG')
     if not cfile:
         bb.fatal('Unable to read GRUB_CFG')
@@ -99,33 +99,33 @@ python build_efi_cfg() {
     cfgfile.write('# Automatically created by OE\n')
 
-    opts = d.getVar('GRUB_OPTS', True)
+    opts = d.getVar('GRUB_OPTS')
     if opts:
         for opt in opts.split(';'):
             cfgfile.write('%s\n' % opt)
 
     cfgfile.write('default=%s\n' % (labels.split()[0]))
 
-    timeout = d.getVar('GRUB_TIMEOUT', True)
+    timeout = d.getVar('GRUB_TIMEOUT')
     if timeout:
         cfgfile.write('timeout=%s\n' % timeout)
     else:
         cfgfile.write('timeout=50\n')
 
-    root = d.getVar('GRUB_ROOT', True)
+    root = d.getVar('GRUB_ROOT')
     if not root:
         bb.fatal('GRUB_ROOT not defined')
 
     if gfxserial == "1":
         btypes = [ [ " graphics console", "" ],
-            [ " serial console", d.getVar('GRUB_SERIAL', True) or "" ] ]
+            [ " serial console", d.getVar('GRUB_SERIAL') or "" ] ]
     else:
         btypes = [ [ "", "" ] ]
 
     for label in labels.split():
         localdata = d.createCopy()
 
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
@@ -141,8 +141,8 @@ python build_efi_cfg() {
             cfgfile.write(' %s' % replace_rootfs_uuid(d, root))
 
-        append = localdata.getVar('APPEND', True)
-        initrd = localdata.getVar('INITRD', True)
+        append = localdata.getVar('APPEND')
+        initrd = localdata.getVar('INITRD')
 
         if append:
             append = replace_rootfs_uuid(d, append)

@@ -18,20 +18,20 @@ gsettings_postinstrm () {
 }
 
 python populate_packages_append () {
-    pkg = d.getVar('PN', True)
+    pkg = d.getVar('PN')
     bb.note("adding gsettings postinst scripts to %s" % pkg)
 
-    postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+    postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
     if not postinst:
         postinst = '#!/bin/sh\n'
-    postinst += d.getVar('gsettings_postinstrm', True)
+    postinst += d.getVar('gsettings_postinstrm')
     d.setVar('pkg_postinst_%s' % pkg, postinst)
 
     bb.note("adding gsettings postrm scripts to %s" % pkg)
 
-    postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+    postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
     if not postrm:
         postrm = '#!/bin/sh\n'
-    postrm += d.getVar('gsettings_postinstrm', True)
+    postrm += d.getVar('gsettings_postinstrm')
     d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

@@ -35,11 +35,11 @@ fi
 }
 
 python populate_packages_append () {
-    packages = d.getVar('PACKAGES', True).split()
-    pkgdest = d.getVar('PKGDEST', True)
+    packages = d.getVar('PACKAGES').split()
+    pkgdest = d.getVar('PKGDEST')
 
     for pkg in packages:
-        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
+        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir'))
         if not os.path.exists(icon_dir):
             continue
@@ -49,16 +49,16 @@ python populate_packages_append () {
         bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)
 
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gtk_icon_cache_postinst', True)
+        postinst += d.getVar('gtk_icon_cache_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gtk_icon_cache_postrm', True)
+        postrm += d.getVar('gtk_icon_cache_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

@@ -61,21 +61,21 @@ fi
 }
 
 python populate_packages_append () {
-    gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES', True).split()
+    gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES').split()
 
     for pkg in gtkimmodules_pkgs:
         bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg)
 
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('gtk_immodule_cache_postinst', True)
+        postinst += d.getVar('gtk_immodule_cache_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg)
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('gtk_immodule_cache_postrm', True)
+        postrm += d.getVar('gtk_immodule_cache_postrm')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }

@@ -100,7 +100,7 @@ def use_icecc(bb,d):
     if icecc_is_allarch(bb, d):
         return "no"
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
 
     system_class_blacklist = []
     user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL', False) or "none").split()
@@ -139,7 +139,7 @@ def use_icecc(bb,d):
     return "yes"
 
 def icecc_is_allarch(bb, d):
-    return d.getVar("PACKAGE_ARCH", True) == "all" or bb.data.inherits_class('allarch', d)
+    return d.getVar("PACKAGE_ARCH") == "all" or bb.data.inherits_class('allarch', d)
 
 def icecc_is_kernel(bb, d):
     return \

@@ -18,7 +18,7 @@ def image_buildinfo_outputvars(vars, listvars, d):
     listvars = listvars.split()
     ret = ""
     for var in vars:
-        value = d.getVar(var, True) or ""
+        value = d.getVar(var) or ""
         if (d.getVarFlag(var, 'type', True) == "list"):
             value = oe.utils.squashspaces(value)
         ret += "%s = %s\n" % (var, value)
@@ -42,7 +42,7 @@ def get_layer_git_status(path):
 
 # Returns layer revisions along with their respective status
 def get_layer_revs(d):
-    layers = (d.getVar("BBLAYERS", True) or "").split()
+    layers = (d.getVar("BBLAYERS") or "").split()
     medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \
         base_get_metadata_git_branch(i, None).strip(), \
         base_get_metadata_git_revision(i, None), \
@@ -52,11 +52,11 @@ def get_layer_revs(d):
 
 def buildinfo_target(d):
     # Get context
-    if d.getVar('BB_WORKERCONTEXT', True) != '1':
+    if d.getVar('BB_WORKERCONTEXT') != '1':
         return ""
     # Single and list variables to be read
-    vars = (d.getVar("IMAGE_BUILDINFO_VARS", True) or "")
-    listvars = (d.getVar("IMAGE_BUILDINFO_LVARS", True) or "")
+    vars = (d.getVar("IMAGE_BUILDINFO_VARS") or "")
+    listvars = (d.getVar("IMAGE_BUILDINFO_LVARS") or "")
     return image_buildinfo_outputvars(vars, listvars, d)
 
 # Write build information to target filesystem

@@ -51,8 +51,8 @@ IMAGE_TYPEDEP_hddimg = "ext4"
 IMAGE_TYPES_MASKED += "live hddimg iso"
 
 python() {
-    image_b = d.getVar('IMAGE_BASENAME', True)
-    initrd_i = d.getVar('INITRD_IMAGE_LIVE', True)
+    image_b = d.getVar('IMAGE_BASENAME')
+    initrd_i = d.getVar('INITRD_IMAGE_LIVE')
     if image_b == initrd_i:
         bb.error('INITRD_IMAGE_LIVE %s cannot use image live, hddimg or iso.' % initrd_i)
         bb.fatal('Check IMAGE_FSTYPES and INITRAMFS_FSTYPES settings.')
@@ -264,9 +264,9 @@ build_hddimg() {
 
 python do_bootimg() {
     set_live_vm_vars(d, 'LIVE')
-    if d.getVar("PCBIOS", True) == "1":
+    if d.getVar("PCBIOS") == "1":
         bb.build.exec_func('build_syslinux_cfg', d)
-    if d.getVar("EFI", True) == "1":
+    if d.getVar("EFI") == "1":
         bb.build.exec_func('build_efi_cfg', d)
     bb.build.exec_func('build_hddimg', d)
     bb.build.exec_func('build_iso', d)

@@ -112,9 +112,9 @@ build_boot_dd() {
 python do_bootdirectdisk() {
     validate_disk_signature(d)
     set_live_vm_vars(d, 'VM')
-    if d.getVar("PCBIOS", True) == "1":
+    if d.getVar("PCBIOS") == "1":
         bb.build.exec_func('build_syslinux_cfg', d)
-    if d.getVar("EFI", True) == "1":
+    if d.getVar("EFI") == "1":
         bb.build.exec_func('build_efi_cfg', d)
     bb.build.exec_func('build_boot_dd', d)
 }
@@ -132,7 +132,7 @@ def generate_disk_signature():
 
 def validate_disk_signature(d):
     import re
 
-    disk_signature = d.getVar("DISK_SIGNATURE", True)
+    disk_signature = d.getVar("DISK_SIGNATURE")
 
     if not re.match(r'^[0-9a-fA-F]{8}$', disk_signature):
         bb.fatal("DISK_SIGNATURE '%s' must be an 8 digit hex string" % disk_signature)
@@ -158,11 +158,11 @@ create_qcow2_image () {
 }
 
 python do_vmimg() {
-    if 'vmdk' in d.getVar('IMAGE_FSTYPES', True):
+    if 'vmdk' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_vmdk_image', d)
-    if 'vdi' in d.getVar('IMAGE_FSTYPES', True):
+    if 'vdi' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_vdi_image', d)
-    if 'qcow2' in d.getVar('IMAGE_FSTYPES', True):
+    if 'qcow2' in d.getVar('IMAGE_FSTYPES'):
         bb.build.exec_func('create_qcow2_image', d)
 }

@@ -2,7 +2,7 @@ inherit rootfs_${IMAGE_PKGTYPE}
 
 # Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk
 # in the non-Linux SDK_OS case, such as mingw32
-SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS", True)]}"
+SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}"
 inherit ${SDKEXTCLASS}
 
 TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}"
@@ -133,7 +133,7 @@ def build_live(d):
     if bb.utils.contains("IMAGE_FSTYPES", "live", "live", "0", d) == "0": # live is not set but hob might set iso or hddimg
         d.setVar('NOISO', bb.utils.contains('IMAGE_FSTYPES', "iso", "0", "1", d))
         d.setVar('NOHDD', bb.utils.contains('IMAGE_FSTYPES', "hddimg", "0", "1", d))
-        if d.getVar('NOISO', True) == "0" or d.getVar('NOHDD', True) == "0":
+        if d.getVar('NOISO') == "0" or d.getVar('NOHDD') == "0":
             return "image-live"
         return ""
     return "image-live"
@@ -145,7 +145,7 @@ IMAGE_TYPE_vm = '${@bb.utils.contains_any("IMAGE_FSTYPES", ["vmdk", "vdi", "qcow
 inherit ${IMAGE_TYPE_vm}
 
 def build_uboot(d):
-    if 'u-boot' in (d.getVar('IMAGE_FSTYPES', True) or ''):
+    if 'u-boot' in (d.getVar('IMAGE_FSTYPES') or ''):
         return "image_types_uboot"
     else:
         return ""
@@ -158,7 +158,7 @@ python () {
     d.appendVarFlag('do_rootfs', 'depends', deps)
 
     deps = ""
-    for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split():
+    for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split():
         deps += " %s:do_populate_sysroot" % dep
     d.appendVarFlag('do_build', 'depends', deps)
@@ -167,22 +167,22 @@ python () {
     features = set(oe.data.typed_value('IMAGE_FEATURES', d))
     remain_features = features.copy()
     for feature in features:
-        replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature, True) or "").split())
+        replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature) or "").split())
         remain_features -= replaces
 
     #Check for conflict image features
     for feature in remain_features:
-        conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature, True) or "").split())
+        conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature) or "").split())
         temp = conflicts & remain_features
         if temp:
-            bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN', True), feature, ' '.join(list(temp))))
+            bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN'), feature, ' '.join(list(temp))))
 
     d.setVar('IMAGE_FEATURES', ' '.join(sorted(list(remain_features))))
 
     check_image_features(d)
-    initramfs_image = d.getVar('INITRAMFS_IMAGE', True) or ""
+    initramfs_image = d.getVar('INITRAMFS_IMAGE') or ""
     if initramfs_image != "":
-        d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN', True))
+        d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN'))
         d.appendVarFlag('do_bundle_initramfs', 'depends', " %s:do_image_complete" % initramfs_image)
 }
@@ -194,7 +194,7 @@ IMAGE_POSTPROCESS_COMMAND ?= ""
 
 # some default locales
 IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
 
-LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}"
+LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}"
 
 # Prefer image, but use the fallback files for lookups if the image ones
 # aren't yet available.
@@ -229,20 +229,20 @@ fakeroot python do_rootfs () {
     progress_reporter.next_stage()
 
     # Handle package exclusions
-    excl_pkgs = d.getVar("PACKAGE_EXCLUDE", True).split()
-    inst_pkgs = d.getVar("PACKAGE_INSTALL", True).split()
-    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY", True).split()
+    excl_pkgs = d.getVar("PACKAGE_EXCLUDE").split()
+    inst_pkgs = d.getVar("PACKAGE_INSTALL").split()
+    inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY").split()
 
     d.setVar('PACKAGE_INSTALL_ORIG', ' '.join(inst_pkgs))
     d.setVar('PACKAGE_INSTALL_ATTEMPTONLY', ' '.join(inst_attempt_pkgs))
 
     for pkg in excl_pkgs:
         if pkg in inst_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_pkgs.remove(pkg)
 
         if pkg in inst_attempt_pkgs:
-            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs))
+            bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs))
             inst_attempt_pkgs.remove(pkg)
 
     d.setVar("PACKAGE_INSTALL", ' '.join(inst_pkgs))
@@ -252,7 +252,7 @@ fakeroot python do_rootfs () {
     # We have to delay the runtime_mapping_rename until just before rootfs runs
     # otherwise, the multilib renaming could step in and squash any fixups that
     # may have occurred.
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     runtime_mapping_rename("PACKAGE_INSTALL", pn, d)
     runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", pn, d)
     runtime_mapping_rename("BAD_RECOMMENDATIONS", pn, d)
@@ -275,7 +275,7 @@ addtask rootfs before do_build
 fakeroot python do_image () {
     from oe.utils import execute_pre_post_process
 
-    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND", True)
+    pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND")
 
     execute_pre_post_process(d, pre_process_cmds)
 }
@@ -286,7 +286,7 @@ addtask do_image after do_rootfs before do_build
 fakeroot python do_image_complete () {
     from oe.utils import execute_pre_post_process
 
-    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND", True)
+    post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND")
 
     execute_pre_post_process(d, post_process_cmds)
 }
@@ -309,7 +309,7 @@ addtask do_image_complete after do_image before do_build
 fakeroot python do_image_qa () {
     from oe.utils import ImageQAFailed
 
-    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS', True) or '').split()
+    qa_cmds = (d.getVar('IMAGE_QA_COMMANDS') or '').split()
     qamsg = ""
 
     for cmd in qa_cmds:
@@ -324,7 +324,7 @@ fakeroot python do_image_qa () {
             qamsg = qamsg + '\n'
 
     if qamsg:
-        imgname = d.getVar('IMAGE_NAME', True)
+        imgname = d.getVar('IMAGE_NAME')
         bb.fatal("QA errors found whilst validating image: %s\n%s" % (imgname, qamsg))
 }
 addtask do_image_qa after do_image_complete before do_build
@@ -334,17 +334,17 @@ addtask do_image_qa after do_image_complete before do_build
 # to tmp/sysroots/<machine>/imgdata/<image>.env
 #
 python do_rootfs_wicenv () {
-    wicvars = d.getVar('WICVARS', True)
+    wicvars = d.getVar('WICVARS')
     if not wicvars:
         return
 
-    stdir = d.getVar('STAGING_DIR_TARGET', True)
+    stdir = d.getVar('STAGING_DIR_TARGET')
     outdir = os.path.join(stdir, 'imgdata')
     bb.utils.mkdirhier(outdir)
-    basename = d.getVar('IMAGE_BASENAME', True)
+    basename = d.getVar('IMAGE_BASENAME')
     with open(os.path.join(outdir, basename) + '.env', 'w') as envf:
         for var in wicvars.split():
-            value = d.getVar(var, True)
+            value = d.getVar(var)
             if value:
                 envf.write('%s="%s"\n' % (var, value.strip()))
 }
@@ -357,7 +357,7 @@ def setup_debugfs_variables(d):
     d.appendVar('IMAGE_LINK_NAME', '-dbg')
     d.appendVar('IMAGE_NAME','-dbg')
     d.setVar('IMAGE_BUILDING_DEBUGFS', 'true')
-    debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True)
+    debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS')
     if debugfs_image_fstypes:
         d.setVar('IMAGE_FSTYPES', debugfs_image_fstypes)
@@ -375,7 +375,7 @@ python () {
     #
    # Without de-duplication, gen_conversion_cmds() below
    # would create the same compression command multiple times.
-    ctypes = set(d.getVar('CONVERSIONTYPES', True).split())
+    ctypes = set(d.getVar('CONVERSIONTYPES').split())
     old_overrides = d.getVar('OVERRIDES', False)
 
     def _image_base_type(type):
@@ -392,11 +392,11 @@ python () {
         return basetype
 
     basetypes = {}
-    alltypes = d.getVar('IMAGE_FSTYPES', True).split()
+    alltypes = d.getVar('IMAGE_FSTYPES').split()
     typedeps = {}
 
-    if d.getVar('IMAGE_GEN_DEBUGFS', True) == "1":
-        debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True).split()
+    if d.getVar('IMAGE_GEN_DEBUGFS') == "1":
+        debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS').split()
         for t in debugfs_fstypes:
             alltypes.append("debugfs_" + t)
@@ -411,7 +411,7 @@ python () {
         if t.startswith("debugfs_"):
             t = t[8:]
             debug = "debugfs_"
-        deps = (d.getVar('IMAGE_TYPEDEP_' + t, True) or "").split()
+        deps = (d.getVar('IMAGE_TYPEDEP_' + t) or "").split()
         vardeps.add('IMAGE_TYPEDEP_' + t)
         if baset not in typedeps:
             typedeps[baset] = set()
@@ -431,7 +431,7 @@ python () {
 
     d.appendVarFlag('do_image', 'vardeps', ' '.join(vardeps))
 
-    maskedtypes = (d.getVar('IMAGE_TYPES_MASKED', True) or "").split()
+    maskedtypes = (d.getVar('IMAGE_TYPES_MASKED') or "").split()
     maskedtypes = [dbg + t for t in maskedtypes for dbg in ("", "debugfs_")]
 
     for t in basetypes:
@@ -459,7 +459,7 @@ python () {
         localdata.delVar('DATETIME')
         localdata.delVar('TMPDIR')
 
-        image_cmd = localdata.getVar("IMAGE_CMD", True)
+        image_cmd = localdata.getVar("IMAGE_CMD")
         vardeps.add('IMAGE_CMD_' + realt)
         if image_cmd:
             cmds.append("\t" + image_cmd)
@@ -481,7 +481,7 @@ python () {
                 # Create input image first.
                 gen_conversion_cmds(type)
                 localdata.setVar('type', type)
-                cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype, True) or localdata.getVar("COMPRESS_CMD_" + ctype, True))
+                cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype) or localdata.getVar("COMPRESS_CMD_" + ctype))
                 if cmd not in cmds:
                     cmds.append(cmd)
                 vardeps.add('CONVERSION_CMD_' + ctype)
@@ -532,17 +532,17 @@ python () {
 def get_rootfs_size(d):
     import subprocess
 
-    rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT', True))
-    overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR', True))
-    rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE', True))
-    rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE', True))
-    rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE', True)
-    image_fstypes = d.getVar('IMAGE_FSTYPES', True) or ''
-    initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES', True) or ''
-    initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE', True)
+    rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT'))
+    overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR'))
+    rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE'))
+    rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE'))
+    rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE')
+    image_fstypes = d.getVar('IMAGE_FSTYPES') or ''
+    initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES') or ''
+    initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE')
 
     output = subprocess.check_output(['du', '-ks',
-                                      d.getVar('IMAGE_ROOTFS', True)])
+                                      d.getVar('IMAGE_ROOTFS')])
     size_kb = int(output.split()[0])
     base_size = size_kb * overhead_factor
     base_size = max(base_size, rootfs_req_size) + rootfs_extra_space
@@ -558,7 +558,7 @@ def get_rootfs_size(d):
     # Do not check image size of the debugfs image. This is not supposed
     # to be deployed, etc. so it doesn't make sense to limit the size
    # of the debug.
-    if (d.getVar('IMAGE_BUILDING_DEBUGFS', True) or "") == "true":
+    if (d.getVar('IMAGE_BUILDING_DEBUGFS') or "") == "true":
        return base_size
 
     # Check the rootfs size against IMAGE_ROOTFS_MAXSIZE (if set)
@@ -589,11 +589,11 @@ python set_image_size () {
 #
 python create_symlinks() {
 
-    deploy_dir = d.getVar('IMGDEPLOYDIR', True)
-    img_name = d.getVar('IMAGE_NAME', True)
-    link_name = d.getVar('IMAGE_LINK_NAME', True)
-    manifest_name = d.getVar('IMAGE_MANIFEST', True)
-    taskname = d.getVar("BB_CURRENTTASK", True)
+    deploy_dir = d.getVar('IMGDEPLOYDIR')
+    img_name = d.getVar('IMAGE_NAME')
+    link_name = d.getVar('IMAGE_LINK_NAME')
+    manifest_name = d.getVar('IMAGE_MANIFEST')
+    taskname = d.getVar("BB_CURRENTTASK")
     subimages = (d.getVarFlag("do_" + taskname, 'subimages', False) or "").split()
     imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix', True) or d.expand("${IMAGE_NAME_SUFFIX}.")

@@ -17,20 +17,20 @@ def imagetypes_getdepends(d):
                 d += ":do_populate_sysroot"
             deps.add(d)
 
-    fstypes = set((d.getVar('IMAGE_FSTYPES', True) or "").split())
-    fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS', True) or "").split())
+    fstypes = set((d.getVar('IMAGE_FSTYPES') or "").split())
+    fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS') or "").split())
 
     deps = set()
     for typestring in fstypes:
         types = typestring.split(".")
         basetype, resttypes = types[0], types[1:]
 
-        adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype, True) , deps)
-        for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype, True) or "").split():
-            adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends, True) , deps)
+        adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype) , deps)
+        for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype) or "").split():
+            adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends) , deps)
         for ctype in resttypes:
-            adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype, True), deps)
-            adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype, True), deps)
+            adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype), deps)
+            adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype), deps)
 
     # Sort the set so that ordering is consistant
     return " ".join(sorted(deps))
@@ -220,7 +220,7 @@ WKS_FILE_CHECKSUM = "${@'${WKS_FULL_PATH}:%s' % os.path.exists('${WKS_FULL_PATH}
 
 do_image_wic[file-checksums] += "${WKS_FILE_CHECKSUM}"
 
 python () {
-    if d.getVar('USING_WIC', True) and 'do_bootimg' in d:
+    if d.getVar('USING_WIC') and 'do_bootimg' in d:
         bb.build.addtask('do_image_wic', '', 'do_bootimg', d)
 }
@@ -228,7 +228,7 @@ python do_write_wks_template () {
     """Write out expanded template contents to WKS_FULL_PATH."""
     import re
-    template_body = d.getVar('_WKS_TEMPLATE', True)
+    template_body = d.getVar('_WKS_TEMPLATE')
 
     # Remove any remnant variable references left behind by the expansion
     # due to undefined variables
@@ -240,18 +240,18 @@ python do_write_wks_template () {
         else:
             template_body = new_body
 
-    wks_file = d.getVar('WKS_FULL_PATH', True)
+    wks_file = d.getVar('WKS_FULL_PATH')
     with open(wks_file, 'w') as f:
         f.write(template_body)
 }
 
 python () {
-    if d.getVar('USING_WIC', True):
+    if d.getVar('USING_WIC'):
         wks_file_u = d.getVar('WKS_FULL_PATH', False)
         wks_file = d.expand(wks_file_u)
         base, ext = os.path.splitext(wks_file)
         if ext == '.in' and os.path.exists(wks_file):
-            wks_out_file = os.path.join(d.getVar('WORKDIR', True), os.path.basename(base))
+            wks_out_file = os.path.join(d.getVar('WORKDIR'), os.path.basename(base))
             d.setVar('WKS_FULL_PATH', wks_out_file)
             d.setVar('WKS_TEMPLATE_PATH', wks_file_u)
             d.setVar('WKS_FILE_CHECKSUM', '${WKS_TEMPLATE_PATH}:True')

@ -179,7 +179,7 @@ def package_qa_get_machine_dict(d):
# Add in any extra user supplied data which may come from a BSP layer, removing the # Add in any extra user supplied data which may come from a BSP layer, removing the
# need to always change this class directly # need to always change this class directly
extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS", True) or "").split() extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS") or "").split()
for m in extra_machdata: for m in extra_machdata:
call = m + "(machdata, d)" call = m + "(machdata, d)"
locs = { "machdata" : machdata, "d" : d} locs = { "machdata" : machdata, "d" : d}
@ -194,23 +194,23 @@ def package_qa_clean_path(path, d, pkg=None):
TMPDIR is stripped, otherwise PKGDEST/pkg is stripped. TMPDIR is stripped, otherwise PKGDEST/pkg is stripped.
""" """
if pkg: if pkg:
path = path.replace(os.path.join(d.getVar("PKGDEST", True), pkg), "/") path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/")
return path.replace(d.getVar("TMPDIR", True), "/").replace("//", "/") return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
def package_qa_write_error(type, error, d): def package_qa_write_error(type, error, d):
logfile = d.getVar('QA_LOGFILE', True) logfile = d.getVar('QA_LOGFILE')
if logfile: if logfile:
p = d.getVar('P', True) p = d.getVar('P')
with open(logfile, "a+") as f: with open(logfile, "a+") as f:
f.write("%s: %s [%s]\n" % (p, error, type)) f.write("%s: %s [%s]\n" % (p, error, type))
def package_qa_handle_error(error_class, error_msg, d): def package_qa_handle_error(error_class, error_msg, d):
package_qa_write_error(error_class, error_msg, d) package_qa_write_error(error_class, error_msg, d)
if error_class in (d.getVar("ERROR_QA", True) or "").split(): if error_class in (d.getVar("ERROR_QA") or "").split():
bb.error("QA Issue: %s [%s]" % (error_msg, error_class)) bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
d.setVar("QA_SANE", False) d.setVar("QA_SANE", False)
return False return False
elif error_class in (d.getVar("WARN_QA", True) or "").split(): elif error_class in (d.getVar("WARN_QA") or "").split():
bb.warn("QA Issue: %s [%s]" % (error_msg, error_class)) bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
else: else:
bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
@ -226,7 +226,7 @@ QAPATHTEST[libexec] = "package_qa_check_libexec"
def package_qa_check_libexec(path,name, d, elf, messages): def package_qa_check_libexec(path,name, d, elf, messages):
# Skip the case where the default is explicitly /usr/libexec # Skip the case where the default is explicitly /usr/libexec
libexec = d.getVar('libexecdir', True) libexec = d.getVar('libexecdir')
if libexec == "/usr/libexec": if libexec == "/usr/libexec":
return True return True
@ -247,7 +247,7 @@ def package_qa_check_rpath(file,name, d, elf, messages):
if os.path.islink(file): if os.path.islink(file):
return return
bad_dirs = [d.getVar('BASE_WORKDIR', True), d.getVar('STAGING_DIR_TARGET', True)] bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')]
phdrs = elf.run_objdump("-p", d) phdrs = elf.run_objdump("-p", d)
@ -275,8 +275,8 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
if os.path.islink(file): if os.path.islink(file):
return return
libdir = d.getVar("libdir", True) libdir = d.getVar("libdir")
base_libdir = d.getVar("base_libdir", True) base_libdir = d.getVar("base_libdir")
phdrs = elf.run_objdump("-p", d) phdrs = elf.run_objdump("-p", d)
@ -333,11 +333,11 @@ def package_qa_check_libdir(d):
""" """
import re import re
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
base_libdir = d.getVar("base_libdir",True) + os.sep base_libdir = d.getVar("base_libdir",True) + os.sep
libdir = d.getVar("libdir", True) + os.sep libdir = d.getVar("libdir") + os.sep
libexecdir = d.getVar("libexecdir", True) + os.sep libexecdir = d.getVar("libexecdir") + os.sep
exec_prefix = d.getVar("exec_prefix", True) + os.sep exec_prefix = d.getVar("exec_prefix") + os.sep
messages = [] messages = []
@ -352,10 +352,10 @@ def package_qa_check_libdir(d):
# Skip subdirectories for any packages with libdir in INSANE_SKIP # Skip subdirectories for any packages with libdir in INSANE_SKIP
skippackages = [] skippackages = []
for package in dirs: for package in dirs:
if 'libdir' in (d.getVar('INSANE_SKIP_' + package, True) or "").split(): if 'libdir' in (d.getVar('INSANE_SKIP_' + package) or "").split():
bb.note("Package %s skipping libdir QA test" % (package)) bb.note("Package %s skipping libdir QA test" % (package))
skippackages.append(package) skippackages.append(package)
elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory' and package.endswith("-dbg"): elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"):
bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package)) bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package))
skippackages.append(package) skippackages.append(package)
for package in skippackages: for package in skippackages:
@ -416,10 +416,10 @@ def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages)
if elf: if elf:
import subprocess as sub import subprocess as sub
pn = d.getVar('PN', True) pn = d.getVar('PN')
exec_prefix = d.getVar('exec_prefix', True) exec_prefix = d.getVar('exec_prefix')
sysroot_path = d.getVar('STAGING_DIR_TARGET', True) sysroot_path = d.getVar('STAGING_DIR_TARGET')
sysroot_path_usr = sysroot_path + exec_prefix sysroot_path_usr = sysroot_path + exec_prefix
try: try:
@ -432,8 +432,8 @@ def package_qa_check_unsafe_references_in_binaries(path, name, d, elf, messages)
if sysroot_path_usr in ldd_output: if sysroot_path_usr in ldd_output:
ldd_output = ldd_output.replace(sysroot_path, "") ldd_output = ldd_output.replace(sysroot_path, "")
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
for package in packages.split(): for package in packages.split():
short_path = path.replace('%s/%s' % (pkgdest, package), "", 1) short_path = path.replace('%s/%s' % (pkgdest, package), "", 1)
@ -459,13 +459,13 @@ def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages):
if not elf: if not elf:
import stat import stat
import subprocess import subprocess
pn = d.getVar('PN', True) pn = d.getVar('PN')
# Ensure we're checking an executable script # Ensure we're checking an executable script
statinfo = os.stat(path) statinfo = os.stat(path)
if bool(statinfo.st_mode & stat.S_IXUSR): if bool(statinfo.st_mode & stat.S_IXUSR):
# grep shell scripts for possible references to /exec_prefix/ # grep shell scripts for possible references to /exec_prefix/
exec_prefix = d.getVar('exec_prefix', True) exec_prefix = d.getVar('exec_prefix')
statement = "grep -e '%s/[^ :]\{1,\}/[^ :]\{1,\}' %s > /dev/null" % (exec_prefix, path) statement = "grep -e '%s/[^ :]\{1,\}/[^ :]\{1,\}' %s > /dev/null" % (exec_prefix, path)
if subprocess.call(statement, shell=True) == 0: if subprocess.call(statement, shell=True) == 0:
error_msg = pn + ": Found a reference to %s/ in %s" % (exec_prefix, path) error_msg = pn + ": Found a reference to %s/ in %s" % (exec_prefix, path)
@ -489,19 +489,19 @@ def unsafe_references_skippable(path, name, d):
return True return True
# Skip unusual rootfs layouts which make these tests irrelevant # Skip unusual rootfs layouts which make these tests irrelevant
exec_prefix = d.getVar('exec_prefix', True) exec_prefix = d.getVar('exec_prefix')
if exec_prefix == "": if exec_prefix == "":
return True return True
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
pkgdest = pkgdest + "/" + name pkgdest = pkgdest + "/" + name
pkgdest = os.path.abspath(pkgdest) pkgdest = os.path.abspath(pkgdest)
base_bindir = pkgdest + d.getVar('base_bindir', True) base_bindir = pkgdest + d.getVar('base_bindir')
base_sbindir = pkgdest + d.getVar('base_sbindir', True) base_sbindir = pkgdest + d.getVar('base_sbindir')
base_libdir = pkgdest + d.getVar('base_libdir', True) base_libdir = pkgdest + d.getVar('base_libdir')
bindir = pkgdest + d.getVar('bindir', True) bindir = pkgdest + d.getVar('bindir')
sbindir = pkgdest + d.getVar('sbindir', True) sbindir = pkgdest + d.getVar('sbindir')
libdir = pkgdest + d.getVar('libdir', True) libdir = pkgdest + d.getVar('libdir')
if base_bindir == bindir and base_sbindir == sbindir and base_libdir == libdir: if base_bindir == bindir and base_sbindir == sbindir and base_libdir == libdir:
return True return True
@ -523,13 +523,13 @@ def package_qa_check_arch(path,name,d, elf, messages):
if not elf: if not elf:
return return
target_os = d.getVar('TARGET_OS', True) target_os = d.getVar('TARGET_OS')
target_arch = d.getVar('TARGET_ARCH', True) target_arch = d.getVar('TARGET_ARCH')
provides = d.getVar('PROVIDES', True) provides = d.getVar('PROVIDES')
bpn = d.getVar('BPN', True) bpn = d.getVar('BPN')
if target_arch == "allarch": if target_arch == "allarch":
pn = d.getVar('PN', True) pn = d.getVar('PN')
package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries") package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
return return
@ -549,7 +549,7 @@ def package_qa_check_arch(path,name,d, elf, messages):
# Check the architecture and endiannes of the binary # Check the architecture and endiannes of the binary
is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \ is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \
(target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE', True))) (target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE')))
if not ((machine == elf.machine()) or is_32): if not ((machine == elf.machine()) or is_32):
package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) on %s" % \ package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) on %s" % \
(oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path,d))) (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path,d)))
@ -608,9 +608,9 @@ def package_qa_hash_style(path, name, d, elf, messages):
if os.path.islink(path): if os.path.islink(path):
return return
gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True) gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS')
if not gnu_hash: if not gnu_hash:
gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True) gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS')
if not gnu_hash: if not gnu_hash:
return return
@ -649,7 +649,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
if path.find(name + "/CONTROL/") != -1 or path.find(name + "/DEBIAN/") != -1: if path.find(name + "/CONTROL/") != -1 or path.find(name + "/DEBIAN/") != -1:
return return
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
with open(path, 'rb') as f: with open(path, 'rb') as f:
file_content = f.read().decode('utf-8', errors='ignore') file_content = f.read().decode('utf-8', errors='ignore')
if tmpdir in file_content: if tmpdir in file_content:
@ -668,8 +668,8 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
driverdir = d.expand("${libdir}/xorg/modules/drivers/") driverdir = d.expand("${libdir}/xorg/modules/drivers/")
if driverdir in path and path.endswith(".so"): if driverdir in path and path.endswith(".so"):
mlprefix = d.getVar('MLPREFIX', True) or '' mlprefix = d.getVar('MLPREFIX') or ''
for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name, True) or ""): for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name) or ""):
if rdep.startswith("%sxorg-abi-" % mlprefix): if rdep.startswith("%sxorg-abi-" % mlprefix):
return return
package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path))) package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
@ -692,9 +692,9 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
if os.path.islink(path): if os.path.islink(path):
target = os.readlink(path) target = os.readlink(path)
if os.path.isabs(target): if os.path.isabs(target):
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
if target.startswith(tmpdir): if target.startswith(tmpdir):
trimmed = path.replace(os.path.join (d.getVar("PKGDEST", True), name), "") trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name)) package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
# Check license variables # Check license variables
@ -706,17 +706,17 @@ python populate_lic_qa_checksum() {
import tempfile import tempfile
sane = True sane = True
lic_files = d.getVar('LIC_FILES_CHKSUM', True) or '' lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
lic = d.getVar('LICENSE', True) lic = d.getVar('LICENSE')
pn = d.getVar('PN', True) pn = d.getVar('PN')
if lic == "CLOSED": if lic == "CLOSED":
return return
if not lic_files and d.getVar('SRC_URI', True): if not lic_files and d.getVar('SRC_URI'):
sane = package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d) sane = package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
srcdir = d.getVar('S', True) srcdir = d.getVar('S')
for url in lic_files.split(): for url in lic_files.split():
try: try:
@ -794,7 +794,7 @@ def package_qa_check_staged(path,d):
""" """
sane = True sane = True
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
workdir = os.path.join(tmpdir, "work") workdir = os.path.join(tmpdir, "work")
if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d): if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
@ -845,8 +845,8 @@ def package_qa_walk(warnfuncs, errorfuncs, skip, package, d):
import oe.qa import oe.qa
#if this will throw an exception, then fix the dict above #if this will throw an exception, then fix the dict above
target_os = d.getVar('TARGET_OS', True) target_os = d.getVar('TARGET_OS')
target_arch = d.getVar('TARGET_ARCH', True) target_arch = d.getVar('TARGET_ARCH')
warnings = {} warnings = {}
errors = {} errors = {}
@ -879,7 +879,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
bb.data.update_data(localdata) bb.data.update_data(localdata)
# Now check the RDEPENDS # Now check the RDEPENDS
rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "") rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "")
# Now do the sanity check!!! # Now do the sanity check!!!
if "build-deps" not in skip: if "build-deps" not in skip:
@ -895,7 +895,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
continue continue
if not rdep_data or not 'PN' in rdep_data: if not rdep_data or not 'PN' in rdep_data:
pkgdata_dir = d.getVar("PKGDATA_DIR", True) pkgdata_dir = d.getVar("PKGDATA_DIR")
try: try:
possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend)) possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
except OSError: except OSError:
@ -954,7 +954,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
             # The python is not a package, but python-core provides it, so
             # skip checking /usr/bin/python if python is in the rdeps, in
             # case there is a RDEPENDS_pkg = "python" in the recipe.
-            for py in [ d.getVar('MLPREFIX', True) + "python", "python" ]:
+            for py in [ d.getVar('MLPREFIX') + "python", "python" ]:
                 if py in done:
                     filerdepends.pop("/usr/bin/python",None)
                     done.remove(py)
@@ -987,7 +987,7 @@ def package_qa_check_deps(pkg, pkgdest, skip, d):
 
     def check_valid_deps(var):
         try:
-            rvar = bb.utils.explode_dep_versions2(localdata.getVar(var, True) or "")
+            rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
         except ValueError as e:
             bb.fatal("%s_%s: %s" % (var, pkg, e))
         for dep in rvar:
@@ -1010,10 +1010,10 @@ def package_qa_check_expanded_d(package, d, messages):
     variables, warn the user to use it correctly.
     """
     sane = True
-    expanded_d = d.getVar('D', True)
+    expanded_d = d.getVar('D')
 
     for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
-        bbvar = d.getVar(var + "_" + package, True) or ""
+        bbvar = d.getVar(var + "_" + package) or ""
         if expanded_d in bbvar:
             if var == 'FILES':
                 package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
@@ -1026,7 +1026,7 @@ def package_qa_check_expanded_d(package, d, messages):
 def package_qa_check_encoding(keys, encode, d):
     def check_encoding(key, enc):
         sane = True
-        value = d.getVar(key, True)
+        value = d.getVar(key)
         if value:
             try:
                 s = value.encode(enc)
@@ -1051,8 +1051,8 @@ def package_qa_check_host_user(path, name, d, elf, messages):
     if not os.path.lexists(path):
         return
 
-    dest = d.getVar('PKGDEST', True)
-    pn = d.getVar('PN', True)
+    dest = d.getVar('PKGDEST')
+    pn = d.getVar('PN')
     home = os.path.join(dest, 'home')
     if path == home or path.startswith(home + os.sep):
         return
@@ -1065,12 +1065,12 @@ def package_qa_check_host_user(path, name, d, elf, messages):
             raise
     else:
         rootfs_path = path[len(dest):]
-        check_uid = int(d.getVar('HOST_USER_UID', True))
+        check_uid = int(d.getVar('HOST_USER_UID'))
         if stat.st_uid == check_uid:
            package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_uid))
            return False
 
-        check_gid = int(d.getVar('HOST_USER_GID', True))
+        check_gid = int(d.getVar('HOST_USER_GID'))
         if stat.st_gid == check_gid:
            package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_gid))
            return False
@@ -1088,8 +1088,8 @@ python do_package_qa () {
     # Check non UTF-8 characters on recipe's metadata
     package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d)
 
-    logdir = d.getVar('T', True)
-    pkg = d.getVar('PN', True)
+    logdir = d.getVar('T')
+    pkg = d.getVar('PN')
 
     # Check the compile log for host contamination
     compilelog = os.path.join(logdir,"log.do_compile")
@@ -1112,8 +1112,8 @@ python do_package_qa () {
         package_qa_handle_error("install-host-path", msg, d)
 
     # Scan the packages...
-    pkgdest = d.getVar('PKGDEST', True)
-    packages = set((d.getVar('PACKAGES', True) or '').split())
+    pkgdest = d.getVar('PKGDEST')
+    packages = set((d.getVar('PACKAGES') or '').split())
 
     cpath = oe.cachedpath.CachedPath()
     global pkgfiles
@@ -1142,7 +1142,7 @@ python do_package_qa () {
         testmatrix = d.getVarFlags(matrix_name) or {}
         g = globals()
         warnchecks = []
-        for w in (d.getVar("WARN_QA", True) or "").split():
+        for w in (d.getVar("WARN_QA") or "").split():
             if w in skip:
                continue
             if w in testmatrix and testmatrix[w] in g:
@@ -1151,7 +1151,7 @@ python do_package_qa () {
                 oe.utils.write_ld_so_conf(d)
 
         errorchecks = []
-        for e in (d.getVar("ERROR_QA", True) or "").split():
+        for e in (d.getVar("ERROR_QA") or "").split():
             if e in skip:
                continue
             if e in testmatrix and testmatrix[e] in g:
@@ -1160,7 +1160,7 @@ python do_package_qa () {
                 oe.utils.write_ld_so_conf(d)
         return warnchecks, errorchecks
 
    for package in packages:
-        skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split()
+        skip = (d.getVar('INSANE_SKIP_' + package) or "").split()
         if skip:
             bb.note("Package %s skipping QA tests: %s" % (package, str(skip)))
@@ -1180,10 +1180,10 @@ python do_package_qa () {
         package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d)
         package_qa_check_deps(package, pkgdest, skip, d)
 
    if 'libdir' in d.getVar("ALL_QA", True).split():
-    if 'libdir' in d.getVar("ALL_QA", True).split():
+    if 'libdir' in d.getVar("ALL_QA").split():
         package_qa_check_libdir(d)
 
-    qa_sane = d.getVar("QA_SANE", True)
+    qa_sane = d.getVar("QA_SANE")
    if not qa_sane:
        bb.fatal("QA run found fatal errors. Please consider fixing them.")
    bb.note("DONE with PACKAGE QA")
@@ -1216,7 +1216,7 @@ python do_qa_configure() {
     ###########################################################################
 
     configs = []
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
 
     if bb.data.inherits_class('autotools', d):
         bb.note("Checking autotools environment for common misconfiguration")
@@ -1237,16 +1237,16 @@ Rerun configure task after fixing this.""")
     # Check gettext configuration and dependencies are correct
     ###########################################################################
 
-    cnf = d.getVar('EXTRA_OECONF', True) or ""
-    if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf:
-        ml = d.getVar("MLPREFIX", True) or ""
+    cnf = d.getVar('EXTRA_OECONF') or ""
+    if "gettext" not in d.getVar('P') and "gcc-runtime" not in d.getVar('P') and "--disable-nls" not in cnf:
+        ml = d.getVar("MLPREFIX") or ""
         if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d):
             gt = "gettext-native"
         elif bb.data.inherits_class('cross-canadian', d):
             gt = "nativesdk-gettext"
         else:
             gt = "virtual/" + ml + "gettext"
-        deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "")
+        deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "")
         if gt not in deps:
             for config in configs:
                 gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
@@ -1261,40 +1261,40 @@ Missing inherit gettext?""" % (gt, config))
     bb.note("Checking configure output for unrecognised options")
     try:
         flag = "WARNING: unrecognized options:"
-        log = os.path.join(d.getVar('B', True), 'config.log')
+        log = os.path.join(d.getVar('B'), 'config.log')
         output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ')
         options = set()
         for line in output.splitlines():
             options |= set(line.partition(flag)[2].split())
-        whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST", True).split())
+        whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST").split())
         options -= whitelist
         if options:
-            pn = d.getVar('PN', True)
+            pn = d.getVar('PN')
             error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
             package_qa_handle_error("unknown-configure-option", error_msg, d)
     except subprocess.CalledProcessError:
         pass
 
     # Check invalid PACKAGECONFIG
-    pkgconfig = (d.getVar("PACKAGECONFIG", True) or "").split()
+    pkgconfig = (d.getVar("PACKAGECONFIG") or "").split()
     if pkgconfig:
         pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
         for pconfig in pkgconfig:
             if pconfig not in pkgconfigflags:
-                pn = d.getVar('PN', True)
+                pn = d.getVar('PN')
                 error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
                 package_qa_handle_error("invalid-packageconfig", error_msg, d)
 
-    qa_sane = d.getVar("QA_SANE", True)
+    qa_sane = d.getVar("QA_SANE")
     if not qa_sane:
         bb.fatal("Fatal QA errors found, failing task.")
 }
 
 python do_qa_unpack() {
-    src_uri = d.getVar('SRC_URI', True)
-    s_dir = d.getVar('S', True)
+    src_uri = d.getVar('SRC_URI')
+    s_dir = d.getVar('S')
     if src_uri and not os.path.exists(s_dir):
-        bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN', True), d.getVar('S', False), s_dir))
+        bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir))
 }
 
 # The Staging Func, to check all staging
@@ -1310,7 +1310,7 @@ do_configure[postfuncs] += "do_qa_configure "
 do_unpack[postfuncs] += "do_qa_unpack"
 
 python () {
-    tests = d.getVar('ALL_QA', True).split()
+    tests = d.getVar('ALL_QA').split()
     if "desktop" in tests:
         d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native")
@@ -1319,7 +1319,7 @@ python () {
 
     ###########################################################################
     # Checking ${FILESEXTRAPATHS}
-    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
+    extrapaths = (d.getVar("FILESEXTRAPATHS") or "")
     if '__default' not in extrapaths.split(":"):
         msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n"
         msg += "type of assignment, and don't forget the colon.\n"
@@ -1331,29 +1331,29 @@ python () {
         msg += "%s\n" % extrapaths
         bb.warn(msg)
 
-    overrides = d.getVar('OVERRIDES', True).split(':')
-    pn = d.getVar('PN', True)
+    overrides = d.getVar('OVERRIDES').split(':')
+    pn = d.getVar('PN')
     if pn in overrides:
-        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE", True), pn)
+        msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
         package_qa_handle_error("pn-overrides", msg, d)
 
     issues = []
-    if (d.getVar('PACKAGES', True) or "").split():
-        for dep in (d.getVar('QADEPENDS', True) or "").split():
+    if (d.getVar('PACKAGES') or "").split():
+        for dep in (d.getVar('QADEPENDS') or "").split():
             d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep)
         for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY':
             if d.getVar(var, False):
                 issues.append(var)
 
-        fakeroot_tests = d.getVar('FAKEROOT_QA', True).split()
+        fakeroot_tests = d.getVar('FAKEROOT_QA').split()
         if set(tests) & set(fakeroot_tests):
             d.setVarFlag('do_package_qa', 'fakeroot', '1')
             d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
     else:
         d.setVarFlag('do_package_qa', 'rdeptask', '')
     for i in issues:
-        package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE", True), i), d)
-    qa_sane = d.getVar("QA_SANE", True)
+        package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
+    qa_sane = d.getVar("QA_SANE")
     if not qa_sane:
         bb.fatal("Fatal QA errors found, failing task.")
 }
View File
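Before the remaining files, a minimal sketch of the datastore behaviour every hunk in this commit relies on. Illustration only, not part of the commit; it assumes a BitBake checkout on sys.path, and the variable names ('PN', 'BPN', the 'example' value) are chosen purely for demonstration:

    import bb.data

    # bb.data.init() returns a fresh DataSmart datastore, the same 'd'
    # object the class code above operates on.
    d = bb.data.init()
    d.setVar('PN', 'example')
    d.setVar('BPN', '${PN}')

    # Expansion is the default, so the explicit True argument is redundant:
    assert d.getVar('BPN', True) == 'example'   # old spelling, still accepted
    assert d.getVar('BPN') == 'example'         # new spelling used in the hunks below
    assert d.getVar('BPN', False) == '${PN}'    # the unexpanded value remains reachable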
@@ -19,7 +19,7 @@ valid_archs = "alpha cris ia64 \
 def map_kernel_arch(a, d):
     import re
 
-    valid_archs = d.getVar('valid_archs', True).split()
+    valid_archs = d.getVar('valid_archs').split()
 
     if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
     elif re.match('armeb$', a): return 'arm'
@@ -34,7 +34,7 @@ def map_kernel_arch(a, d):
     else:
         bb.error("cannot map '%s' to a linux kernel architecture" % a)
 
-export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"
+export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH'), d)}"
 
 def map_uboot_arch(a, d):
     import re
@@ -43,7 +43,7 @@ def map_uboot_arch(a, d):
     elif re.match('i.86$', a): return 'x86'
     return a
 
-export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"
+export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH'), d)}"
 
 # Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture
 # specific options necessary for building the kernel and modules.
View File
@@ -1,13 +1,13 @@
 inherit kernel-uboot uboot-sign
 
 python __anonymous () {
-    kerneltypes = d.getVar('KERNEL_IMAGETYPES', True) or ""
+    kerneltypes = d.getVar('KERNEL_IMAGETYPES') or ""
     if 'fitImage' in kerneltypes.split():
-        depends = d.getVar("DEPENDS", True)
+        depends = d.getVar("DEPENDS")
         depends = "%s u-boot-mkimage-native dtc-native" % depends
         d.setVar("DEPENDS", depends)
 
-        if d.getVar("UBOOT_ARCH", True) == "x86":
+        if d.getVar("UBOOT_ARCH") == "x86":
             replacementtype = "bzImage"
         else:
             replacementtype = "zImage"
@@ -15,19 +15,19 @@ python __anonymous () {
 
         # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal
         # to kernel.bbclass . We have to override it, since we pack zImage
         # (at least for now) into the fitImage .
-        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or ""
+        typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or ""
         if 'fitImage' in typeformake.split():
             d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('fitImage', replacementtype))
 
-        image = d.getVar('INITRAMFS_IMAGE', True)
+        image = d.getVar('INITRAMFS_IMAGE')
         if image:
             d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
 
         # Verified boot will sign the fitImage and append the public key to
         # U-boot dtb. We ensure the U-Boot dtb is deployed before assembling
         # the fitImage:
-        if d.getVar('UBOOT_SIGN_ENABLE', True):
-            uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot', True) or 'u-boot'
+        if d.getVar('UBOOT_SIGN_ENABLE'):
+            uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot'
             d.appendVarFlag('do_assemble_fitimage', 'depends', ' %s:do_deploy' % uboot_pn)
 }
View File
@@ -92,7 +92,7 @@ python __anonymous () {
 	fi
 '''
 
-    imagetypes = d.getVar('KERNEL_IMAGETYPES', True)
+    imagetypes = d.getVar('KERNEL_IMAGETYPES')
     imagetypes = re.sub(r'\.gz$', '', imagetypes)
 
     for type in imagetypes.split():
View File
@@ -39,10 +39,10 @@ python split_kernel_module_packages () {
 
     def extract_modinfo(file):
         import tempfile, subprocess
-        tempfile.tempdir = d.getVar("WORKDIR", True)
+        tempfile.tempdir = d.getVar("WORKDIR")
         tf = tempfile.mkstemp()
         tmpfile = tf[1]
-        cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
+        cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX") or "", file, tmpfile)
         subprocess.call(cmd, shell=True)
         f = open(tmpfile)
         l = f.read().split("\000")
@@ -60,12 +60,12 @@ python split_kernel_module_packages () {
     def frob_metadata(file, pkg, pattern, format, basename):
         vals = extract_modinfo(file)
 
-        dvar = d.getVar('PKGD', True)
+        dvar = d.getVar('PKGD')
 
         # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
         # appropriate modprobe commands to the postinst
-        autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD", True) or "").split()
-        autoload = d.getVar('module_autoload_%s' % basename, True)
+        autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD") or "").split()
+        autoload = d.getVar('module_autoload_%s' % basename)
         if autoload and autoload == basename:
             bb.warn("module_autoload_%s was replaced by KERNEL_MODULE_AUTOLOAD for cases where basename == module name, please drop it" % basename)
         if autoload and basename not in autoloadlist:
@@ -79,15 +79,15 @@ python split_kernel_module_packages () {
             else:
                 f.write('%s\n' % basename)
             f.close()
-            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+            postinst = d.getVar('pkg_postinst_%s' % pkg)
             if not postinst:
                 bb.fatal("pkg_postinst_%s not defined" % pkg)
-            postinst += d.getVar('autoload_postinst_fragment', True) % (autoload or basename)
+            postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename)
             d.setVar('pkg_postinst_%s' % pkg, postinst)
 
         # Write out any modconf fragment
-        modconflist = (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split()
-        modconf = d.getVar('module_conf_%s' % basename, True)
+        modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()
+        modconf = d.getVar('module_conf_%s' % basename)
         if modconf and basename in modconflist:
             name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
             f = open(name, 'w')
@@ -96,15 +96,15 @@ python split_kernel_module_packages () {
         elif modconf:
             bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename))
 
-        files = d.getVar('FILES_%s' % pkg, True)
+        files = d.getVar('FILES_%s' % pkg)
         files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
         d.setVar('FILES_%s' % pkg, files)
 
         if "description" in vals:
-            old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
+            old_desc = d.getVar('DESCRIPTION_' + pkg) or ""
             d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])
 
-        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "")
+        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
         modinfo_deps = []
         if "depends" in vals and vals["depends"] != "":
             for dep in vals["depends"].split(","):
@@ -121,24 +121,24 @@ python split_kernel_module_packages () {
     module_regex = '^(.*)\.k?o$'
 
-    module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX', True)
+    module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX')
     module_pattern = module_pattern_prefix + 'kernel-module-%s'
 
-    postinst = d.getVar('pkg_postinst_modules', True)
-    postrm = d.getVar('pkg_postrm_modules', True)
+    postinst = d.getVar('pkg_postinst_modules')
+    postrm = d.getVar('pkg_postrm_modules')
 
-    modules = do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION", True)))
+    modules = do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION")))
     if modules:
-        metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE', True)
+        metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE')
         d.appendVar('RDEPENDS_' + metapkg, ' '+' '.join(modules))
 
     # If modules-load.d and modprobe.d are empty at this point, remove them to
     # avoid warnings. removedirs only raises an OSError if an empty
     # directory cannot be removed.
-    dvar = d.getVar('PKGD', True)
+    dvar = d.getVar('PKGD')
     for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
         if len(os.listdir(dir)) == 0:
             os.rmdir(dir)
 }
 
-do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split()))}'
+do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()))}'
View File
@@ -1,8 +1,8 @@
 inherit kernel-uboot
 
 python __anonymous () {
-    if "uImage" in (d.getVar('KERNEL_IMAGETYPES', True) or "").split():
-        depends = d.getVar("DEPENDS", True)
+    if "uImage" in (d.getVar('KERNEL_IMAGETYPES') or "").split():
+        depends = d.getVar("DEPENDS")
         depends = "%s u-boot-mkimage-native" % depends
         d.setVar("DEPENDS", depends)
@@ -11,8 +11,8 @@ python __anonymous () {
         # to build uImage using the kernel build system if and only if
         # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into
         # the uImage .
-        if d.getVar("KEEPUIMAGE", True) != 'yes':
-            typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or ""
+        if d.getVar("KEEPUIMAGE") != 'yes':
+            typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or ""
             if "uImage" in typeformake.split():
                 d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('uImage', 'vmlinux'))
 }
View File
@@ -290,7 +290,7 @@ python do_kernel_configcheck() {
     if not os.path.exists(kmeta):
         kmeta = "." + kmeta
 
-    pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH', True), "${S}/scripts/util/")
+    pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH'), "${S}/scripts/util/")
 
     cmd = d.expand("scc --configs -o ${S}/.kernel-meta")
     ret, configs = oe.utils.getstatusoutput("%s%s" % (pathprefix, cmd))
View File
@@ -22,18 +22,18 @@ INITRAMFS_IMAGE_BUNDLE ?= ""
 # number and cause kernel to be rebuilt. To avoid this, make
 # KERNEL_VERSION_NAME and KERNEL_VERSION_PKG_NAME depend on
 # LINUX_VERSION which is a constant.
-KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION', True) or ""}"
+KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION') or ""}"
 KERNEL_VERSION_NAME[vardepvalue] = "${LINUX_VERSION}"
-KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION', True))}"
+KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}"
 KERNEL_VERSION_PKG_NAME[vardepvalue] = "${LINUX_VERSION}"
 
 python __anonymous () {
     import re
 
     # Merge KERNEL_IMAGETYPE and KERNEL_ALT_IMAGETYPE into KERNEL_IMAGETYPES
-    type = d.getVar('KERNEL_IMAGETYPE', True) or ""
-    alttype = d.getVar('KERNEL_ALT_IMAGETYPE', True) or ""
-    types = d.getVar('KERNEL_IMAGETYPES', True) or ""
+    type = d.getVar('KERNEL_IMAGETYPE') or ""
+    alttype = d.getVar('KERNEL_ALT_IMAGETYPE') or ""
+    types = d.getVar('KERNEL_IMAGETYPES') or ""
     if type not in types.split():
         types = (type + ' ' + types).strip()
     if alttype not in types.split():
@@ -56,15 +56,15 @@ python __anonymous () {
         d.setVar('ALLOW_EMPTY_kernel-image-' + typelower, '1')
 
-        imagedest = d.getVar('KERNEL_IMAGEDEST', True)
-        priority = d.getVar('KERNEL_PRIORITY', True)
+        imagedest = d.getVar('KERNEL_IMAGEDEST')
+        priority = d.getVar('KERNEL_PRIORITY')
         postinst = '#!/bin/sh\n' + 'update-alternatives --install /' + imagedest + '/' + type + ' ' + type + ' ' + '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME} ' + priority + ' || true' + '\n'
         d.setVar('pkg_postinst_kernel-image-' + typelower, postinst)
 
         postrm = '#!/bin/sh\n' + 'update-alternatives --remove' + ' ' + type + ' ' + type + '-${KERNEL_VERSION_NAME} || true' + '\n'
         d.setVar('pkg_postrm_kernel-image-' + typelower, postrm)
 
-    image = d.getVar('INITRAMFS_IMAGE', True)
+    image = d.getVar('INITRAMFS_IMAGE')
     if image:
         d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete')
@@ -72,7 +72,7 @@ python __anonymous () {
     # The preferred method is to set INITRAMFS_IMAGE, because
     # this INITRAMFS_TASK has circular dependency problems
     # if the initramfs requires kernel modules
-    image_task = d.getVar('INITRAMFS_TASK', True)
+    image_task = d.getVar('INITRAMFS_TASK')
     if image_task:
         d.appendVarFlag('do_configure', 'depends', ' ${INITRAMFS_TASK}')
 }
@@ -101,15 +101,15 @@ inherit ${KERNEL_CLASSES}
 do_unpack[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}"
 do_clean[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}"
 base_do_unpack_append () {
-    s = d.getVar("S", True)
+    s = d.getVar("S")
     if s[-1] == '/':
         # drop trailing slash, so that os.symlink(kernsrc, s) doesn't use s as directory name and fail
         s=s[:-1]
-    kernsrc = d.getVar("STAGING_KERNEL_DIR", True)
+    kernsrc = d.getVar("STAGING_KERNEL_DIR")
     if s != kernsrc:
         bb.utils.mkdirhier(kernsrc)
         bb.utils.remove(kernsrc, recurse=True)
-        if d.getVar("EXTERNALSRC", True):
+        if d.getVar("EXTERNALSRC"):
             # With EXTERNALSRC S will not be wiped so we can symlink to it
             os.symlink(s, kernsrc)
         else:
@@ -127,9 +127,9 @@ PACKAGES_DYNAMIC += "^kernel-firmware-.*"
 export OS = "${TARGET_OS}"
 export CROSS_COMPILE = "${TARGET_PREFIX}"
 
-KERNEL_PRIORITY ?= "${@int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[0]) * 10000 + \
-    int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[1]) * 100 + \
-    int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[-1])}"
+KERNEL_PRIORITY ?= "${@int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[0]) * 10000 + \
+    int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[1]) * 100 + \
+    int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[-1])}"
 
 KERNEL_RELEASE ?= "${KERNEL_VERSION}"
@@ -140,7 +140,7 @@ KERNEL_IMAGEDEST = "boot"
 #
 # configuration
 #
-export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE", True) or "ttyS0"}"
+export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE") or "ttyS0"}"
 
 KERNEL_VERSION = "${@get_kernelversion_headers('${B}')}"
@@ -430,14 +430,14 @@ sysroot_stage_all () {
 KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} O=${B} oldnoconfig || yes '' | oe_runmake -C ${S} O=${B} oldconfig"
 
 python check_oldest_kernel() {
-    oldest_kernel = d.getVar('OLDEST_KERNEL', True)
-    kernel_version = d.getVar('KERNEL_VERSION', True)
-    tclibc = d.getVar('TCLIBC', True)
+    oldest_kernel = d.getVar('OLDEST_KERNEL')
+    kernel_version = d.getVar('KERNEL_VERSION')
+    tclibc = d.getVar('TCLIBC')
     if tclibc == 'glibc':
         kernel_version = kernel_version.split('-', 1)[0]
     if oldest_kernel and kernel_version:
         if bb.utils.vercmp_string(kernel_version, oldest_kernel) < 0:
-            bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN', True), oldest_kernel, kernel_version, tclibc))
+            bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN'), oldest_kernel, kernel_version, tclibc))
 }
 
 check_oldest_kernel[vardepsexclude] += "OLDEST_KERNEL KERNEL_VERSION"
View File
@@ -17,15 +17,15 @@ do_install() {
 }
 
 def get_libc_fpu_setting(bb, d):
-    if d.getVar('TARGET_FPU', True) in [ 'soft', 'ppc-efd' ]:
+    if d.getVar('TARGET_FPU') in [ 'soft', 'ppc-efd' ]:
         return "--without-fp"
     return ""
 
 python populate_packages_prepend () {
-    if d.getVar('DEBIAN_NAMES', True):
-        pkgs = d.getVar('PACKAGES', True).split()
-        bpn = d.getVar('BPN', True)
-        prefix = d.getVar('MLPREFIX', True) or ""
+    if d.getVar('DEBIAN_NAMES'):
+        pkgs = d.getVar('PACKAGES').split()
+        bpn = d.getVar('BPN')
+        prefix = d.getVar('MLPREFIX') or ""
         # Set the base package...
         d.setVar('PKG_' + prefix + bpn, prefix + 'libc6')
         libcprefix = prefix + bpn + '-'
View File
@@ -12,24 +12,24 @@ GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
 GLIBC_SPLIT_LC_PACKAGES ?= "0"
 
 python __anonymous () {
-    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True)
+    enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION")
 
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     if pn.endswith("-initial"):
         enabled = False
 
     if enabled and int(enabled):
         import re
 
-        target_arch = d.getVar("TARGET_ARCH", True)
-        binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or ""
-        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or ""
+        target_arch = d.getVar("TARGET_ARCH")
+        binary_arches = d.getVar("BINARY_LOCALE_ARCHES") or ""
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or ""
 
         for regexp in binary_arches.split(" "):
             r = re.compile(regexp)
 
             if r.match(target_arch):
-                depends = d.getVar("DEPENDS", True)
+                depends = d.getVar("DEPENDS")
                 if use_cross_localedef == "1" :
                     depends = "%s cross-localedef-native" % depends
                 else:
@@ -94,21 +94,21 @@ inherit qemu
 
 python package_do_split_gconvs () {
     import re
-    if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
+    if (d.getVar('PACKAGE_NO_GCONV') == '1'):
         bb.note("package requested not splitting gconvs")
         return
 
-    if not d.getVar('PACKAGES', True):
+    if not d.getVar('PACKAGES'):
         return
 
-    mlprefix = d.getVar("MLPREFIX", True) or ""
+    mlprefix = d.getVar("MLPREFIX") or ""
 
-    bpn = d.getVar('BPN', True)
-    libdir = d.getVar('libdir', True)
+    bpn = d.getVar('BPN')
+    libdir = d.getVar('libdir')
     if not libdir:
         bb.error("libdir not defined")
         return
-    datadir = d.getVar('datadir', True)
+    datadir = d.getVar('datadir')
     if not datadir:
         bb.error("datadir not defined")
         return
@@ -116,7 +116,7 @@ python package_do_split_gconvs () {
     gconv_libdir = base_path_join(libdir, "gconv")
     charmap_dir = base_path_join(datadir, "i18n", "charmaps")
     locales_dir = base_path_join(datadir, "i18n", "locales")
-    binary_locales_dir = d.getVar('localedir', True)
+    binary_locales_dir = d.getVar('localedir')
 
     def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
         deps = []
@@ -183,13 +183,13 @@ python package_do_split_gconvs () {
         description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
     d.setVar('PACKAGES', d.getVar('PACKAGES', False) + ' ' + d.getVar('MLPREFIX', False) + bpn + '-gconv')
 
-    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
+    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE")
 
     dot_re = re.compile("(.*)\.(.*)")
 
     # Read in supported locales and associated encodings
     supported = {}
-    with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
+    with open(base_path_join(d.getVar('WORKDIR'), "SUPPORTED")) as f:
         for line in f.readlines():
             try:
                 locale, charset = line.rstrip().split()
@@ -198,7 +198,7 @@ python package_do_split_gconvs () {
             supported[locale] = charset
 
     # GLIBC_GENERATE_LOCALES var specifies which locales to be generated. empty or "all" means all locales
-    to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
+    to_generate = d.getVar('GLIBC_GENERATE_LOCALES')
     if not to_generate or to_generate == 'all':
         to_generate = sorted(supported.keys())
     else:
@@ -215,14 +215,14 @@ python package_do_split_gconvs () {
     def output_locale_source(name, pkgname, locale, encoding):
         d.setVar('RDEPENDS_%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \
         (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
-        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
+        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst') \
         % (locale, encoding, locale))
-        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
+        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm') % \
         (locale, encoding, locale))
 
     def output_locale_binary_rdepends(name, pkgname, locale, encoding):
         dep = legitimize_package_name('%s-binary-localedata-%s' % (bpn, name))
-        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES', True)
+        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
         if lcsplit and int(lcsplit):
             d.appendVar('PACKAGES', ' ' + dep)
             d.setVar('ALLOW_EMPTY_%s' % dep, '1')
@@ -231,16 +231,16 @@ python package_do_split_gconvs () {
     commands = {}
 
     def output_locale_binary(name, pkgname, locale, encoding):
-        treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
-        ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
-        path = d.getVar("PATH", True)
+        treedir = base_path_join(d.getVar("WORKDIR"), "locale-tree")
+        ldlibdir = base_path_join(treedir, d.getVar("base_libdir"))
+        path = d.getVar("PATH")
         i18npath = base_path_join(treedir, datadir, "i18n")
         gconvpath = base_path_join(treedir, "iconvdata")
         outputpath = base_path_join(treedir, binary_locales_dir)
 
-        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
+        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or "0"
         if use_cross_localedef == "1":
-            target_arch = d.getVar('TARGET_ARCH', True)
+            target_arch = d.getVar('TARGET_ARCH')
             locale_arch_options = { \
                 "arm": " --uint32-align=4 --little-endian ", \
                 "armeb": " --uint32-align=4 --big-endian ", \
@@ -279,7 +279,7 @@ python package_do_split_gconvs () {
                 --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
                 % (treedir, datadir, locale, encoding, name)
 
-            qemu_options = d.getVar('QEMU_OPTIONS', True)
+            qemu_options = d.getVar('QEMU_OPTIONS')
 
             cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
                 -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@@ -292,7 +292,7 @@ python package_do_split_gconvs () {
     def output_locale(name, locale, encoding):
         pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name)
         d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
-        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
+        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES')))
         rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
         m = re.match("(.*)_(.*)", name)
         if m:
@@ -311,8 +311,8 @@ python package_do_split_gconvs () {
         bb.note("preparing tree for binary locale generation")
         bb.build.exec_func("do_prep_locale_tree", d)
 
-    utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
-    utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT', True) or 0)
+    utf8_only = int(d.getVar('LOCALE_UTF8_ONLY') or 0)
+    utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT') or 0)
 
     encodings = {}
     for locale in to_generate:
@@ -344,7 +344,7 @@ python package_do_split_gconvs () {
             d.appendVar('RDEPENDS_%s' % metapkg, ' ' + pkg)
 
     if use_bin == "compile":
-        makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
+        makefile = base_path_join(d.getVar("WORKDIR"), "locale-tree", "Makefile")
         m = open(makefile, "w")
         m.write("all: %s\n\n" % " ".join(commands.keys()))
         for cmd in commands:
@@ -358,7 +358,7 @@ python package_do_split_gconvs () {
         bb.build.exec_func("do_collect_bins_from_locale_tree", d)
 
     if use_bin in ('compile', 'precompiled'):
-        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES', True)
+        lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES')
         if lcsplit and int(lcsplit):
             do_split_packages(d, binary_locales_dir, file_regex='^(.*/LC_\w+)', \
                 output_pattern=bpn+'-binary-localedata-%s', \
View File
@@ -37,13 +37,13 @@ python license_create_manifest() {
     import oe.packagedata
     from oe.rootfs import image_list_installed_packages
 
-    build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS', True)
+    build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS')
     if build_images_from_feeds == "1":
         return 0
 
     pkg_dic = {}
     for pkg in sorted(image_list_installed_packages(d)):
-        pkg_info = os.path.join(d.getVar('PKGDATA_DIR', True),
+        pkg_info = os.path.join(d.getVar('PKGDATA_DIR'),
                                 'runtime-reverse', pkg)
         pkg_name = os.path.basename(os.readlink(pkg_info))
@@ -52,15 +52,15 @@ python license_create_manifest() {
             pkg_lic_name = "LICENSE_" + pkg_name
             pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name]
 
-    rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
-                        d.getVar('IMAGE_NAME', True), 'license.manifest')
+    rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+                        d.getVar('IMAGE_NAME'), 'license.manifest')
     write_license_files(d, rootfs_license_manifest, pkg_dic)
 }
 
 def write_license_files(d, license_manifest, pkg_dic):
     import re
 
-    bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE", True) or "").split()
+    bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE") or "").split()
     bad_licenses = map(lambda l: canonical_license(d, l), bad_licenses)
     bad_licenses = expand_wildcard_licenses(d, bad_licenses)
@@ -72,7 +72,7 @@ def write_license_files(d, license_manifest, pkg_dic):
                 oe.license.manifest_licenses(pkg_dic[pkg]["LICENSE"],
                     bad_licenses, canonical_license, d)
             except oe.license.LicenseError as exc:
-                bb.fatal('%s: %s' % (d.getVar('P', True), exc))
+                bb.fatal('%s: %s' % (d.getVar('P'), exc))
             else:
                 pkg_dic[pkg]["LICENSES"] = re.sub('[|&()*]', ' ', pkg_dic[pkg]["LICENSE"])
                 pkg_dic[pkg]["LICENSES"] = re.sub('  *', ' ', pkg_dic[pkg]["LICENSES"])
@@ -98,7 +98,7 @@ def write_license_files(d, license_manifest, pkg_dic):
                     license_file.write("FILES: %s\n\n" % pkg_dic[pkg]["FILES"])
 
                 for lic in pkg_dic[pkg]["LICENSES"]:
-                    lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
+                    lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY'),
                                             pkg_dic[pkg]["PN"], "generic_%s" %
                                             re.sub('\+', '', lic))
                     # add explicity avoid of CLOSED license because isn't generic
@@ -114,8 +114,8 @@ def write_license_files(d, license_manifest, pkg_dic):
     #   - Just copy the manifest
     #   - Copy the manifest and the license directories
     # With both options set we see a .5 M increase in core-image-minimal
-    copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST', True)
-    copy_lic_dirs = d.getVar('COPY_LIC_DIRS', True)
+    copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST')
+    copy_lic_dirs = d.getVar('COPY_LIC_DIRS')
     if copy_lic_manifest == "1":
         rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS', 'True'),
                                           'usr', 'share', 'common-licenses')
@@ -129,7 +129,7 @@ def write_license_files(d, license_manifest, pkg_dic):
             for pkg in sorted(pkg_dic):
                 pkg_rootfs_license_dir = os.path.join(rootfs_license_dir, pkg)
                 bb.utils.mkdirhier(pkg_rootfs_license_dir)
-                pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
+                pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
                                                pkg_dic[pkg]["PN"])
                 licenses = os.listdir(pkg_license_dir)
                 for lic in licenses:
@@ -166,7 +166,7 @@ def license_deployed_manifest(d):
 
     dep_dic = {}
     man_dic = {}
-    lic_dir = d.getVar("LICENSE_DIRECTORY", True)
+    lic_dir = d.getVar("LICENSE_DIRECTORY")
 
     dep_dic = get_deployed_dependencies(d)
     for dep in dep_dic.keys():
@@ -181,8 +181,8 @@ def license_deployed_manifest(d):
                     key,val = line.split(": ", 1)
                     man_dic[dep][key] = val[:-1]
 
-    lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True),
-                                    d.getVar('IMAGE_NAME', True))
+    lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'),
+                                    d.getVar('IMAGE_NAME'))
     bb.utils.mkdirhier(lic_manifest_dir)
     image_license_manifest = os.path.join(lic_manifest_dir, 'image_license.manifest')
     write_license_files(d, image_license_manifest, man_dic)
@@ -202,7 +202,7 @@ def get_deployed_dependencies(d):
         depends = list(set([dep[0] for dep
                         in list(taskdata.values())
                         if not dep[0].endswith("-native")]))
-    extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True)
+    extra_depends = d.getVar("EXTRA_IMAGEDEPENDS")
     boot_depends = get_boot_dependencies(d)
     depends.extend(extra_depends.split())
     depends.extend(boot_depends)
@@ -212,13 +212,13 @@ def get_deployed_dependencies(d):
     # the SSTATE_MANIFESTS for "deploy" task.
     # The manifest file name contains the arch. Because we are not running
    # in the recipe context it is necessary to check every arch used.
-    sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS", True)
-    sstate_archs = d.getVar("SSTATE_ARCHS", True)
-    extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS", True)
+    sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS")
+    sstate_archs = d.getVar("SSTATE_ARCHS")
+    extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS")
     archs = list(set(("%s %s" % (sstate_archs, extra_archs)).split()))
     for dep in depends:
         # Some recipes have an arch on their own, so we try that first.
-        special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep, True)
+        special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep)
         if special_arch:
             sstate_manifest_file = os.path.join(sstate_manifest_dir,
                 "manifest-%s-%s.deploy" % (special_arch, dep))
@@ -254,7 +254,7 @@ def get_boot_dependencies(d):
                     in boot_depends_string.split()
                     if not dep.split(":")[0].endswith("-native")]
     for dep in boot_depends:
-        info_file = os.path.join(d.getVar("LICENSE_DIRECTORY", True),
+        info_file = os.path.join(d.getVar("LICENSE_DIRECTORY"),
                 dep, "recipeinfo")
         # If the recipe and dependency name is the same
         if os.path.exists(info_file):
@@ -265,7 +265,7 @@ def get_boot_dependencies(d):
             # The fifth field contains what the task provides
             if dep in taskdep[4]:
                 info_file = os.path.join(
-                        d.getVar("LICENSE_DIRECTORY", True),
+                        d.getVar("LICENSE_DIRECTORY"),
                         taskdep[0], "recipeinfo")
                 if os.path.exists(info_file):
                     depends.append(taskdep[0])
@@ -295,7 +295,7 @@ python do_populate_lic() {
     lic_files_paths = find_license_files(d)
 
     # The base directory we wrangle licenses to
-    destdir = os.path.join(d.getVar('LICSSTATEDIR', True), d.getVar('PN', True))
+    destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('PN'))
     copy_license_files(lic_files_paths, destdir)
     info = get_recipe_info(d)
     with open(os.path.join(destdir, "recipeinfo"), "w") as f:
@@ -306,11 +306,11 @@ python do_populate_lic() {
 # it would be better to copy them in do_install_append, but find_license_filesa is python
 python perform_packagecopy_prepend () {
     enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
-    if d.getVar('CLASSOVERRIDE', True) == 'class-target' and enabled:
+    if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled:
         lic_files_paths = find_license_files(d)
 
         # LICENSE_FILES_DIRECTORY starts with '/' so os.path.join cannot be used to join D and LICENSE_FILES_DIRECTORY
-        destdir = d.getVar('D', True) + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY', True), d.getVar('PN', True))
+        destdir = d.getVar('D') + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY'), d.getVar('PN'))
         copy_license_files(lic_files_paths, destdir)
         add_package_and_files(d)
 }
@@ -318,15 +318,15 @@ perform_packagecopy[vardeps] += "LICENSE_CREATE_PACKAGE"
 
 def get_recipe_info(d):
     info = {}
-    info["PV"] = d.getVar("PV", True)
-    info["PR"] = d.getVar("PR", True)
-    info["LICENSE"] = d.getVar("LICENSE", True)
+    info["PV"] = d.getVar("PV")
+    info["PR"] = d.getVar("PR")
+    info["LICENSE"] = d.getVar("LICENSE")
     return info
 
 def add_package_and_files(d):
-    packages = d.getVar('PACKAGES', True)
-    files = d.getVar('LICENSE_FILES_DIRECTORY', True)
-    pn = d.getVar('PN', True)
+    packages = d.getVar('PACKAGES')
+    files = d.getVar('LICENSE_FILES_DIRECTORY')
+    pn = d.getVar('PN')
     pn_lic = "%s%s" % (pn, d.getVar('LICENSE_PACKAGE_SUFFIX', False))
     if pn_lic in packages:
         bb.warn("%s package already existed in %s." % (pn_lic, pn))
@@ -334,7 +334,7 @@ def add_package_and_files(d):
         # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY
         d.setVar('PACKAGES', "%s %s" % (pn_lic, packages))
         d.setVar('FILES_' + pn_lic, files)
-        rrecommends_pn = d.getVar('RRECOMMENDS_' + pn, True)
+        rrecommends_pn = d.getVar('RRECOMMENDS_' + pn)
         if rrecommends_pn:
            d.setVar('RRECOMMENDS_' + pn, "%s %s" % (pn_lic, rrecommends_pn))
         else:
@ -390,12 +390,12 @@ def find_license_files(d):
from collections import defaultdict, OrderedDict from collections import defaultdict, OrderedDict
# All the license files for the package # All the license files for the package
lic_files = d.getVar('LIC_FILES_CHKSUM', True) or "" lic_files = d.getVar('LIC_FILES_CHKSUM') or ""
pn = d.getVar('PN', True) pn = d.getVar('PN')
# The license files are located in S/LIC_FILE_CHECKSUM. # The license files are located in S/LIC_FILE_CHECKSUM.
srcdir = d.getVar('S', True) srcdir = d.getVar('S')
# Directory we store the generic licenses as set in the distro configuration # Directory we store the generic licenses as set in the distro configuration
generic_directory = d.getVar('COMMON_LICENSE_DIR', True) generic_directory = d.getVar('COMMON_LICENSE_DIR')
# List of basename, path tuples # List of basename, path tuples
lic_files_paths = [] lic_files_paths = []
# Entries from LIC_FILES_CHKSUM # Entries from LIC_FILES_CHKSUM
@ -403,7 +403,7 @@ def find_license_files(d):
license_source_dirs = [] license_source_dirs = []
license_source_dirs.append(generic_directory) license_source_dirs.append(generic_directory)
try: try:
additional_lic_dirs = d.getVar('LICENSE_PATH', True).split() additional_lic_dirs = d.getVar('LICENSE_PATH').split()
for lic_dir in additional_lic_dirs: for lic_dir in additional_lic_dirs:
license_source_dirs.append(lic_dir) license_source_dirs.append(lic_dir)
except: except:
@ -473,18 +473,18 @@ def find_license_files(d):
try: try:
(type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url) (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
except bb.fetch.MalformedUrl: except bb.fetch.MalformedUrl:
bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF', True), url)) bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF'), url))
# We want the license filename and path # We want the license filename and path
chksum = parm['md5'] if 'md5' in parm else parm['sha256'] chksum = parm['md5'] if 'md5' in parm else parm['sha256']
lic_chksums[path] = chksum lic_chksums[path] = chksum
v = FindVisitor() v = FindVisitor()
try: try:
v.visit_string(d.getVar('LICENSE', True)) v.visit_string(d.getVar('LICENSE'))
except oe.license.InvalidLicense as exc: except oe.license.InvalidLicense as exc:
bb.fatal('%s: %s' % (d.getVar('PF', True), exc)) bb.fatal('%s: %s' % (d.getVar('PF'), exc))
except SyntaxError: except SyntaxError:
bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF', True))) bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF')))
# Add files from LIC_FILES_CHKSUM to list of license files # Add files from LIC_FILES_CHKSUM to list of license files
lic_chksum_paths = defaultdict(OrderedDict) lic_chksum_paths = defaultdict(OrderedDict)
@ -542,7 +542,7 @@ def expand_wildcard_licenses(d, wildcard_licenses):
def incompatible_license_contains(license, truevalue, falsevalue, d): def incompatible_license_contains(license, truevalue, falsevalue, d):
license = canonical_license(d, license) license = canonical_license(d, license)
bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split() bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split()
bad_licenses = expand_wildcard_licenses(d, bad_licenses) bad_licenses = expand_wildcard_licenses(d, bad_licenses)
return truevalue if license in bad_licenses else falsevalue return truevalue if license in bad_licenses else falsevalue
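
incompatible_license_contains() is a ternary helper intended for inline expansion: it canonicalises the license name, then picks one of the two supplied values. A hypothetical call site (the variable and feature names here are invented for illustration):

# hypothetical recipe fragment:
# UGLY_FEATURE ?= "${@incompatible_license_contains('GPLv3', '', 'ugly', d)}"
# i.e. when GPLv3 appears in INCOMPATIBLE_LICENSE, the feature collapses to ''
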
@ -553,9 +553,9 @@ def incompatible_license(d, dont_want_licenses, package=None):
as canonical (SPDX) names. as canonical (SPDX) names.
""" """
import oe.license import oe.license
license = d.getVar("LICENSE_%s" % package, True) if package else None license = d.getVar("LICENSE_%s" % package) if package else None
if not license: if not license:
license = d.getVar('LICENSE', True) license = d.getVar('LICENSE')
# Handles an "or" or two license sets provided by # Handles an "or" or two license sets provided by
# flattened_licenses(), pick one that works if possible. # flattened_licenses(), pick one that works if possible.
@ -566,7 +566,7 @@ def incompatible_license(d, dont_want_licenses, package=None):
try: try:
licenses = oe.license.flattened_licenses(license, choose_lic_set) licenses = oe.license.flattened_licenses(license, choose_lic_set)
except oe.license.LicenseError as exc: except oe.license.LicenseError as exc:
bb.fatal('%s: %s' % (d.getVar('P', True), exc)) bb.fatal('%s: %s' % (d.getVar('P'), exc))
return any(not oe.license.license_ok(canonical_license(d, l), \ return any(not oe.license.license_ok(canonical_license(d, l), \
dont_want_licenses) for l in licenses) dont_want_licenses) for l in licenses)
@ -614,16 +614,16 @@ def check_license_flags(d):
def all_license_flags_match(license_flags, whitelist): def all_license_flags_match(license_flags, whitelist):
""" Return first unmatched flag, None if all flags match """ """ Return first unmatched flag, None if all flags match """
pn = d.getVar('PN', True) pn = d.getVar('PN')
split_whitelist = whitelist.split() split_whitelist = whitelist.split()
for flag in license_flags.split(): for flag in license_flags.split():
if not license_flag_matches(flag, split_whitelist, pn): if not license_flag_matches(flag, split_whitelist, pn):
return flag return flag
return None return None
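
all_license_flags_match() walks each flag in LICENSE_FLAGS and returns the first one the whitelist does not cover. A simplified, runnable stand-in for the matching rule (the real license_flag_matches also accepts flags that already carry a _<pn> suffix):

def license_flag_matches(flag, whitelist, pn):
    # simplified: a flag matches either bare or qualified with _<pn>
    return flag in whitelist or "%s_%s" % (flag, pn) in whitelist

whitelist = "commercial_gst-plugins-ugly".split()   # hypothetical whitelist
print(license_flag_matches("commercial", whitelist, "gst-plugins-ugly"))  # True
print(license_flag_matches("commercial", whitelist, "mpg123"))            # False
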
license_flags = d.getVar('LICENSE_FLAGS', True) license_flags = d.getVar('LICENSE_FLAGS')
if license_flags: if license_flags:
whitelist = d.getVar('LICENSE_FLAGS_WHITELIST', True) whitelist = d.getVar('LICENSE_FLAGS_WHITELIST')
if not whitelist: if not whitelist:
return license_flags return license_flags
unmatched_flag = all_license_flags_match(license_flags, whitelist) unmatched_flag = all_license_flags_match(license_flags, whitelist)
@ -637,8 +637,8 @@ def check_license_format(d):
Validate operators in LICENSES. Validate operators in LICENSES.
No spaces are allowed between LICENSES. No spaces are allowed between LICENSES.
""" """
pn = d.getVar('PN', True) pn = d.getVar('PN')
licenses = d.getVar('LICENSE', True) licenses = d.getVar('LICENSE')
from oe.license import license_operator, license_operator_chars, license_pattern from oe.license import license_operator, license_operator_chars, license_pattern
elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) elements = list(filter(lambda x: x.strip(), license_operator.split(licenses)))

View File

@ -4,11 +4,11 @@ def set_live_vm_vars(d, suffix):
vars = ['GRUB_CFG', 'SYSLINUX_CFG', 'ROOT', 'LABELS', 'INITRD'] vars = ['GRUB_CFG', 'SYSLINUX_CFG', 'ROOT', 'LABELS', 'INITRD']
for var in vars: for var in vars:
var_with_suffix = var + '_' + suffix var_with_suffix = var + '_' + suffix
if d.getVar(var, True): if d.getVar(var):
bb.warn('Found potentially conflicting var %s, please use %s rather than %s' % \ bb.warn('Found potentially conflicting var %s, please use %s rather than %s' % \
(var, var_with_suffix, var)) (var, var_with_suffix, var))
elif d.getVar(var_with_suffix, True): elif d.getVar(var_with_suffix):
d.setVar(var, d.getVar(var_with_suffix, True)) d.setVar(var, d.getVar(var_with_suffix))
EFI = "${@bb.utils.contains("MACHINE_FEATURES", "efi", "1", "0", d)}" EFI = "${@bb.utils.contains("MACHINE_FEATURES", "efi", "1", "0", d)}"
@ -25,7 +25,7 @@ def pcbios(d):
return pcbios return pcbios
PCBIOS = "${@pcbios(d)}" PCBIOS = "${@pcbios(d)}"
PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS', True) == '1']}" PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS') == '1']}"
inherit ${EFI_CLASS} inherit ${EFI_CLASS}
inherit ${PCBIOS_CLASS} inherit ${PCBIOS_CLASS}
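
PCBIOS_CLASS uses the ['', 'syslinux'][cond] idiom: the boolean indexes the two-element list, and inheriting an empty class name is a no-op. The same trick in plain Python:

pcbios = '1'                              # hypothetical PCBIOS value
cls = ['', 'syslinux'][pcbios == '1']     # False -> '', True -> 'syslinux'
print(cls or '(nothing inherited)')
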

View File

@ -26,7 +26,7 @@ def base_detect_branch(d):
return "<unknown>" return "<unknown>"
def base_get_scmbasepath(d): def base_get_scmbasepath(d):
return os.path.join(d.getVar('COREBASE', True), 'meta') return os.path.join(d.getVar('COREBASE'), 'meta')
def base_get_metadata_monotone_branch(path, d): def base_get_metadata_monotone_branch(path, d):
monotone_branch = "<unknown>" monotone_branch = "<unknown>"

View File

@ -6,12 +6,12 @@ python migrate_localcount_handler () {
if not e.data: if not e.data:
return return
pv = e.data.getVar('PV', True) pv = e.data.getVar('PV')
if not 'AUTOINC' in pv: if not 'AUTOINC' in pv:
return return
localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', e.data) localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', e.data)
pn = e.data.getVar('PN', True) pn = e.data.getVar('PN')
revs = localcounts.get_by_pattern('%%-%s_rev' % pn) revs = localcounts.get_by_pattern('%%-%s_rev' % pn)
counts = localcounts.get_by_pattern('%%-%s_count' % pn) counts = localcounts.get_by_pattern('%%-%s_count' % pn)
if not revs or not counts: if not revs or not counts:
@ -21,10 +21,10 @@ python migrate_localcount_handler () {
bb.warn("The number of revs and localcounts don't match in %s" % pn) bb.warn("The number of revs and localcounts don't match in %s" % pn)
return return
version = e.data.getVar('PRAUTOINX', True) version = e.data.getVar('PRAUTOINX')
srcrev = bb.fetch2.get_srcrev(e.data) srcrev = bb.fetch2.get_srcrev(e.data)
base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)] base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)]
pkgarch = e.data.getVar('PACKAGE_ARCH', True) pkgarch = e.data.getVar('PACKAGE_ARCH')
value = max(int(count) for count in counts) value = max(int(count) for count in counts)
if len(revs) == 1: if len(revs) == 1:
@ -33,8 +33,8 @@ python migrate_localcount_handler () {
else: else:
value += 1 value += 1
bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR', True)) bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR'))
df = e.data.getVar('LOCALCOUNT_DUMPFILE', True) df = e.data.getVar('LOCALCOUNT_DUMPFILE')
flock = bb.utils.lockfile("%s.lock" % df) flock = bb.utils.lockfile("%s.lock" % df)
with open(df, 'a') as fd: with open(df, 'a') as fd:
fd.write('PRAUTO$%s$%s$%s = "%s"\n' % fd.write('PRAUTO$%s$%s$%s = "%s"\n' %
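
The dump above is guarded by a lock file so concurrent parsers cannot interleave writes. A rough, runnable analogue of the lock-append-unlock sequence, using fcntl in place of bb.utils.lockfile()/unlockfile() and entirely invented values:

import fcntl

df = "/tmp/localcount.dump"                       # hypothetical dump file
with open(df + ".lock", "w") as lock:
    fcntl.flock(lock, fcntl.LOCK_EX)              # bb.utils.lockfile() analogue
    with open(df, "a") as fd:
        fd.write('PRAUTO$AUTOINC-1.0$x86_64$abc123 = "5"\n')
    fcntl.flock(lock, fcntl.LOCK_UN)              # bb.utils.unlockfile() analogue
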

View File

@ -28,8 +28,8 @@ fi
python populate_packages_append () { python populate_packages_append () {
import re import re
packages = d.getVar('PACKAGES', True).split() packages = d.getVar('PACKAGES').split()
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
for pkg in packages: for pkg in packages:
mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg) mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
@ -41,15 +41,15 @@ python populate_packages_append () {
mimes.append(f) mimes.append(f)
if mimes: if mimes:
bb.note("adding mime postinst and postrm scripts to %s" % pkg) bb.note("adding mime postinst and postrm scripts to %s" % pkg)
postinst = d.getVar('pkg_postinst_%s' % pkg, True) postinst = d.getVar('pkg_postinst_%s' % pkg)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += d.getVar('mime_postinst', True) postinst += d.getVar('mime_postinst')
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
postrm = d.getVar('pkg_postrm_%s' % pkg, True) postrm = d.getVar('pkg_postrm_%s' % pkg)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
postrm += d.getVar('mime_postrm', True) postrm += d.getVar('mime_postrm')
d.setVar('pkg_postrm_%s' % pkg, postrm) d.setVar('pkg_postrm_%s' % pkg, postrm)
bb.note("adding shared-mime-info-data dependency to %s" % pkg) bb.note("adding shared-mime-info-data dependency to %s" % pkg)
d.appendVar('RDEPENDS_' + pkg, " shared-mime-info-data") d.appendVar('RDEPENDS_' + pkg, " shared-mime-info-data")
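
The postinst/postrm handling above follows a grow-or-create pattern: reuse the existing script if there is one, otherwise start a fresh shebang, then append the class fragment. A runnable dict-backed sketch (the fragment text is illustrative only):

store = {'mime_postinst': 'update-mime-database $D${datadir}/mime\n'}  # illustrative

def append_fragment(store, varname, fragment_var):
    # grow-or-create, mirroring the pkg_postinst handling above
    script = store.get(varname) or '#!/bin/sh\n'
    store[varname] = script + store[fragment_var]

append_fragment(store, 'pkg_postinst_foo', 'mime_postinst')
print(store['pkg_postinst_foo'])
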

View File

@ -9,7 +9,7 @@ EXTRA_OEMAKE += "KERNEL_SRC=${STAGING_KERNEL_DIR}"
MODULES_INSTALL_TARGET ?= "modules_install" MODULES_INSTALL_TARGET ?= "modules_install"
python __anonymous () { python __anonymous () {
depends = d.getVar('DEPENDS', True) depends = d.getVar('DEPENDS')
extra_symbols = [] extra_symbols = []
for dep in depends.split(): for dep in depends.split():
if dep.startswith("kernel-module-"): if dep.startswith("kernel-module-"):

View File

@ -1,20 +1,20 @@
python multilib_virtclass_handler () { python multilib_virtclass_handler () {
cls = e.data.getVar("BBEXTENDCURR", True) cls = e.data.getVar("BBEXTENDCURR")
variant = e.data.getVar("BBEXTENDVARIANT", True) variant = e.data.getVar("BBEXTENDVARIANT")
if cls != "multilib" or not variant: if cls != "multilib" or not variant:
return return
e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR', True)) e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR'))
# There should only be one kernel in multilib configs # There should only be one kernel in multilib configs
# We also skip multilib setup for module packages. # We also skip multilib setup for module packages.
provides = (e.data.getVar("PROVIDES", True) or "").split() provides = (e.data.getVar("PROVIDES") or "").split()
if "virtual/kernel" in provides or bb.data.inherits_class('module-base', e.data): if "virtual/kernel" in provides or bb.data.inherits_class('module-base', e.data):
raise bb.parse.SkipPackage("We shouldn't have multilib variants for the kernel") raise bb.parse.SkipPackage("We shouldn't have multilib variants for the kernel")
save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME", True) or "" save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME") or ""
for name in save_var_name.split(): for name in save_var_name.split():
val=e.data.getVar(name, True) val=e.data.getVar(name)
if val: if val:
e.data.setVar(name + "_MULTILIB_ORIGINAL", val) e.data.setVar(name + "_MULTILIB_ORIGINAL", val)
@ -26,7 +26,7 @@ python multilib_virtclass_handler () {
if bb.data.inherits_class('image', e.data): if bb.data.inherits_class('image', e.data):
e.data.setVar("MLPREFIX", variant + "-") e.data.setVar("MLPREFIX", variant + "-")
e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False))
e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT', True)) e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT'))
target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False) target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False)
if target_vendor: if target_vendor:
e.data.setVar("TARGET_VENDOR", target_vendor) e.data.setVar("TARGET_VENDOR", target_vendor)
@ -50,7 +50,7 @@ python multilib_virtclass_handler () {
# Expand this since this won't work correctly once we set a multilib into place # Expand this since this won't work correctly once we set a multilib into place
e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS"))
override = ":virtclass-multilib-" + variant override = ":virtclass-multilib-" + variant
@ -60,7 +60,7 @@ python multilib_virtclass_handler () {
# Expand the WHITELISTs with multilib prefix # Expand the WHITELISTs with multilib prefix
for whitelist in ["WHITELIST_GPL-3.0", "LGPLv2_WHITELIST_GPL-3.0"]: for whitelist in ["WHITELIST_GPL-3.0", "LGPLv2_WHITELIST_GPL-3.0"]:
pkgs = e.data.getVar(whitelist, True) pkgs = e.data.getVar(whitelist)
for pkg in pkgs.split(): for pkg in pkgs.split():
pkgs += " " + variant + "-" + pkg pkgs += " " + variant + "-" + pkg
e.data.setVar(whitelist, pkgs) e.data.setVar(whitelist, pkgs)
@ -78,7 +78,7 @@ multilib_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
STAGINGCC_prepend = "${BBEXTENDVARIANT}-" STAGINGCC_prepend = "${BBEXTENDVARIANT}-"
python __anonymous () { python __anonymous () {
variant = d.getVar("BBEXTENDVARIANT", True) variant = d.getVar("BBEXTENDVARIANT")
import oe.classextend import oe.classextend
@ -88,7 +88,7 @@ python __anonymous () {
clsextend.map_depends_variable("PACKAGE_INSTALL") clsextend.map_depends_variable("PACKAGE_INSTALL")
clsextend.map_depends_variable("LINGUAS_INSTALL") clsextend.map_depends_variable("LINGUAS_INSTALL")
clsextend.map_depends_variable("RDEPENDS") clsextend.map_depends_variable("RDEPENDS")
pinstall = d.getVar("LINGUAS_INSTALL", True) + " " + d.getVar("PACKAGE_INSTALL", True) pinstall = d.getVar("LINGUAS_INSTALL") + " " + d.getVar("PACKAGE_INSTALL")
d.setVar("PACKAGE_INSTALL", pinstall) d.setVar("PACKAGE_INSTALL", pinstall)
d.setVar("LINGUAS_INSTALL", "") d.setVar("LINGUAS_INSTALL", "")
# FIXME, we need to map this to something, not delete it! # FIXME, we need to map this to something, not delete it!
@ -104,7 +104,7 @@ python __anonymous () {
return return
clsextend.rename_packages() clsextend.rename_packages()
clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split()) clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split())
clsextend.map_packagevars() clsextend.map_packagevars()
clsextend.map_regexp_variable("PACKAGES_DYNAMIC") clsextend.map_regexp_variable("PACKAGES_DYNAMIC")
@ -119,7 +119,7 @@ PACKAGEFUNCS_append = " do_package_qa_multilib"
python do_package_qa_multilib() { python do_package_qa_multilib() {
def check_mlprefix(pkg, var, mlprefix): def check_mlprefix(pkg, var, mlprefix):
values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var, True) or "") values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var) or "")
candidates = [] candidates = []
for i in values: for i in values:
if i.startswith('virtual/'): if i.startswith('virtual/'):
@ -130,14 +130,14 @@ python do_package_qa_multilib() {
candidates.append(i) candidates.append(i)
if len(candidates) > 0: if len(candidates) > 0:
msg = "%s package %s - suspicious values '%s' in %s" \ msg = "%s package %s - suspicious values '%s' in %s" \
% (d.getVar('PN', True), pkg, ' '.join(candidates), var) % (d.getVar('PN'), pkg, ' '.join(candidates), var)
package_qa_handle_error("multilib", msg, d) package_qa_handle_error("multilib", msg, d)
ml = d.getVar('MLPREFIX', True) ml = d.getVar('MLPREFIX')
if not ml: if not ml:
return return
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
for pkg in packages.split(): for pkg in packages.split():
check_mlprefix(pkg, 'RDEPENDS', ml) check_mlprefix(pkg, 'RDEPENDS', ml)
check_mlprefix(pkg, 'RPROVIDES', ml) check_mlprefix(pkg, 'RPROVIDES', ml)
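
check_mlprefix() flags runtime dependencies that lack the active multilib prefix. A simplified, runnable illustration of the idea (the real check also tolerates virtual/ providers, cross and kernel-module packages, and a few other cases):

ml = "lib32-"                               # hypothetical MLPREFIX
rdepends = ["lib32-glibc", "bash"]          # hypothetical RDEPENDS values
suspicious = [i for i in rdepends if not i.startswith(ml)]
print(suspicious)                           # ['bash'] would be reported
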

View File

@ -1,7 +1,7 @@
def preferred_ml_updates(d): def preferred_ml_updates(d):
# If any PREFERRED_PROVIDER or PREFERRED_VERSION are set, # If any PREFERRED_PROVIDER or PREFERRED_VERSION are set,
# we need to mirror these variables in the multilib case; # we need to mirror these variables in the multilib case;
multilibs = d.getVar('MULTILIBS', True) or "" multilibs = d.getVar('MULTILIBS') or ""
if not multilibs: if not multilibs:
return return
@ -102,7 +102,7 @@ def preferred_ml_updates(d):
prov = prov.replace("virtual/", "") prov = prov.replace("virtual/", "")
return "virtual/" + prefix + "-" + prov return "virtual/" + prefix + "-" + prov
mp = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split() mp = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
extramp = [] extramp = []
for p in mp: for p in mp:
if p.endswith("-native") or "-crosssdk-" in p or p.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in p: if p.endswith("-native") or "-crosssdk-" in p or p.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in p:
@ -111,14 +111,14 @@ def preferred_ml_updates(d):
extramp.append(translate_provide(pref, p)) extramp.append(translate_provide(pref, p))
d.setVar("MULTI_PROVIDER_WHITELIST", " ".join(mp + extramp)) d.setVar("MULTI_PROVIDER_WHITELIST", " ".join(mp + extramp))
abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
extras = [] extras = []
for p in prefixes: for p in prefixes:
for a in abisafe: for a in abisafe:
extras.append(p + "-" + a) extras.append(p + "-" + a)
d.appendVar("SIGGEN_EXCLUDERECIPES_ABISAFE", " " + " ".join(extras)) d.appendVar("SIGGEN_EXCLUDERECIPES_ABISAFE", " " + " ".join(extras))
siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
extras = [] extras = []
for p in prefixes: for p in prefixes:
for a in siggen_exclude: for a in siggen_exclude:
@ -128,7 +128,7 @@ def preferred_ml_updates(d):
python multilib_virtclass_handler_vendor () { python multilib_virtclass_handler_vendor () {
if isinstance(e, bb.event.ConfigParsed): if isinstance(e, bb.event.ConfigParsed):
for v in e.data.getVar("MULTILIB_VARIANTS", True).split(): for v in e.data.getVar("MULTILIB_VARIANTS").split():
if e.data.getVar("TARGET_VENDOR_virtclass-multilib-" + v, False) is None: if e.data.getVar("TARGET_VENDOR_virtclass-multilib-" + v, False) is None:
e.data.setVar("TARGET_VENDOR_virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v) e.data.setVar("TARGET_VENDOR_virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v)
preferred_ml_updates(e.data) preferred_ml_updates(e.data)
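
The vendor handler derives per-variant TARGET_VENDOR values by appending "ml" plus the variant name. With hypothetical inputs:

target_vendor = "-poky"                     # hypothetical TARGET_VENDOR
for v in "lib32 lib64".split():             # hypothetical MULTILIB_VARIANTS
    print("TARGET_VENDOR_virtclass-multilib-%s = %s" % (v, target_vendor + "ml" + v))
# -> -pokymllib32 and -pokymllib64
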
@ -140,14 +140,14 @@ python multilib_virtclass_handler_global () {
if not e.data: if not e.data:
return return
variant = e.data.getVar("BBEXTENDVARIANT", True) variant = e.data.getVar("BBEXTENDVARIANT")
if isinstance(e, bb.event.RecipeParsed) and not variant: if isinstance(e, bb.event.RecipeParsed) and not variant:
if bb.data.inherits_class('kernel', e.data) or \ if bb.data.inherits_class('kernel', e.data) or \
bb.data.inherits_class('module-base', e.data) or \ bb.data.inherits_class('module-base', e.data) or \
(bb.data.inherits_class('allarch', e.data) and\ (bb.data.inherits_class('allarch', e.data) and\
not bb.data.inherits_class('packagegroup', e.data)): not bb.data.inherits_class('packagegroup', e.data)):
variants = (e.data.getVar("MULTILIB_VARIANTS", True) or "").split() variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split()
import oe.classextend import oe.classextend
clsextends = [] clsextends = []
@ -155,21 +155,21 @@ python multilib_virtclass_handler_global () {
clsextends.append(oe.classextend.ClassExtender(variant, e.data)) clsextends.append(oe.classextend.ClassExtender(variant, e.data))
# Process PROVIDES # Process PROVIDES
origprovs = provs = e.data.getVar("PROVIDES", True) or "" origprovs = provs = e.data.getVar("PROVIDES") or ""
for clsextend in clsextends: for clsextend in clsextends:
provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False) provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False)
e.data.setVar("PROVIDES", provs) e.data.setVar("PROVIDES", provs)
# Process RPROVIDES # Process RPROVIDES
origrprovs = rprovs = e.data.getVar("RPROVIDES", True) or "" origrprovs = rprovs = e.data.getVar("RPROVIDES") or ""
for clsextend in clsextends: for clsextend in clsextends:
rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False) rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False)
if rprovs.strip(): if rprovs.strip():
e.data.setVar("RPROVIDES", rprovs) e.data.setVar("RPROVIDES", rprovs)
# Process RPROVIDES_${PN}... # Process RPROVIDES_${PN}...
for pkg in (e.data.getVar("PACKAGES", True) or "").split(): for pkg in (e.data.getVar("PACKAGES") or "").split():
origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg, True) or "" origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg) or ""
for clsextend in clsextends: for clsextend in clsextends:
rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES_%s" % pkg, setvar=False) rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES_%s" % pkg, setvar=False)
rprovs = rprovs + " " + clsextend.extname + "-" + pkg rprovs = rprovs + " " + clsextend.extname + "-" + pkg

View File

@ -116,18 +116,18 @@ MACHINEOVERRIDES = ""
PATH_prepend = "${COREBASE}/scripts/native-intercept:" PATH_prepend = "${COREBASE}/scripts/native-intercept:"
python native_virtclass_handler () { python native_virtclass_handler () {
classextend = e.data.getVar('BBCLASSEXTEND', True) or "" classextend = e.data.getVar('BBCLASSEXTEND') or ""
if "native" not in classextend: if "native" not in classextend:
return return
pn = e.data.getVar("PN", True) pn = e.data.getVar("PN")
if not pn.endswith("-native"): if not pn.endswith("-native"):
return return
def map_dependencies(varname, d, suffix = ""): def map_dependencies(varname, d, suffix = ""):
if suffix: if suffix:
varname = varname + "_" + suffix varname = varname + "_" + suffix
deps = d.getVar(varname, True) deps = d.getVar(varname)
if not deps: if not deps:
return return
deps = bb.utils.explode_deps(deps) deps = bb.utils.explode_deps(deps)
@ -146,14 +146,14 @@ python native_virtclass_handler () {
e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native") e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native")
map_dependencies("DEPENDS", e.data) map_dependencies("DEPENDS", e.data)
for pkg in [e.data.getVar("PN", True), "", "${PN}"]: for pkg in [e.data.getVar("PN"), "", "${PN}"]:
map_dependencies("RDEPENDS", e.data, pkg) map_dependencies("RDEPENDS", e.data, pkg)
map_dependencies("RRECOMMENDS", e.data, pkg) map_dependencies("RRECOMMENDS", e.data, pkg)
map_dependencies("RSUGGESTS", e.data, pkg) map_dependencies("RSUGGESTS", e.data, pkg)
map_dependencies("RPROVIDES", e.data, pkg) map_dependencies("RPROVIDES", e.data, pkg)
map_dependencies("RREPLACES", e.data, pkg) map_dependencies("RREPLACES", e.data, pkg)
provides = e.data.getVar("PROVIDES", True) provides = e.data.getVar("PROVIDES")
nprovides = [] nprovides = []
for prov in provides.split(): for prov in provides.split():
if prov.find(pn) != -1: if prov.find(pn) != -1:

View File

@ -64,17 +64,17 @@ export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${libdir}/pkgconfig"
export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}" export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}"
python nativesdk_virtclass_handler () { python nativesdk_virtclass_handler () {
pn = e.data.getVar("PN", True) pn = e.data.getVar("PN")
if not (pn.endswith("-nativesdk") or pn.startswith("nativesdk-")): if not (pn.endswith("-nativesdk") or pn.startswith("nativesdk-")):
return return
e.data.setVar("MLPREFIX", "nativesdk-") e.data.setVar("MLPREFIX", "nativesdk-")
e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN", True).replace("-nativesdk", "").replace("nativesdk-", "")) e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN").replace("-nativesdk", "").replace("nativesdk-", ""))
e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk") e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk")
} }
python () { python () {
pn = d.getVar("PN", True) pn = d.getVar("PN")
if not pn.startswith("nativesdk-"): if not pn.startswith("nativesdk-"):
return return
@ -82,7 +82,7 @@ python () {
clsextend = oe.classextend.NativesdkClassExtender("nativesdk", d) clsextend = oe.classextend.NativesdkClassExtender("nativesdk", d)
clsextend.rename_packages() clsextend.rename_packages()
clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split()) clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split())
clsextend.map_depends_variable("DEPENDS") clsextend.map_depends_variable("DEPENDS")
clsextend.map_packagevars() clsextend.map_packagevars()
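
The nativesdk handler normalises either spelling of the affix into a single canonical nativesdk- prefix before the anonymous function runs (recipes without either affix return early and are never renamed). The renaming step in isolation:

for pn in ["curl-nativesdk", "nativesdk-curl"]:   # hypothetical PNs
    new = "nativesdk-" + pn.replace("-nativesdk", "").replace("nativesdk-", "")
    print(pn, "->", new)                          # both become nativesdk-curl
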

View File

@ -13,7 +13,7 @@ def npm_oe_arch_map(target_arch, d):
elif re.match('arm64$', target_arch): return 'arm' elif re.match('arm64$', target_arch): return 'arm'
return target_arch return target_arch
NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH', True), d)}" NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH'), d)}"
npm_do_compile() { npm_do_compile() {
# Copy in any additionally fetched modules # Copy in any additionally fetched modules
@ -59,7 +59,7 @@ python populate_packages_prepend () {
description = pdata.get('description', None) description = pdata.get('description', None)
if description: if description:
d.setVar('SUMMARY_%s' % expanded_pkgname, description.replace(u"\u2018", "'").replace(u"\u2019", "'")) d.setVar('SUMMARY_%s' % expanded_pkgname, description.replace(u"\u2018", "'").replace(u"\u2019", "'"))
d.appendVar('RDEPENDS_%s' % d.getVar('PN', True), ' %s' % ' '.join(pkgnames).replace('_', '-')) d.appendVar('RDEPENDS_%s' % d.getVar('PN'), ' %s' % ' '.join(pkgnames).replace('_', '-'))
} }
FILES_${PN} += " \ FILES_${PN} += " \

View File

@ -1,7 +1,7 @@
addtask lint before do_build addtask lint before do_build
do_lint[nostamp] = "1" do_lint[nostamp] = "1"
python do_lint() { python do_lint() {
pkgname = d.getVar("PN", True) pkgname = d.getVar("PN")
############################## ##############################
# Test that DESCRIPTION exists # Test that DESCRIPTION exists
@ -35,7 +35,7 @@ python do_lint() {
# Check that all patches have Signed-off-by and Upstream-Status # Check that all patches have Signed-off-by and Upstream-Status
# #
srcuri = d.getVar("SRC_URI", False).split() srcuri = d.getVar("SRC_URI", False).split()
fpaths = (d.getVar('FILESPATH', True) or '').split(':') fpaths = (d.getVar('FILESPATH') or '').split(':')
def findPatch(patchname): def findPatch(patchname):
for dir in fpaths: for dir in fpaths:

View File

@ -120,7 +120,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
""" """
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
root = d.expand(root) root = d.expand(root)
output_pattern = d.expand(output_pattern) output_pattern = d.expand(output_pattern)
extra_depends = d.expand(extra_depends) extra_depends = d.expand(extra_depends)
@ -130,7 +130,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
if not os.path.exists(dvar + root): if not os.path.exists(dvar + root):
return [] return []
ml = d.getVar("MLPREFIX", True) ml = d.getVar("MLPREFIX")
if ml: if ml:
if not output_pattern.startswith(ml): if not output_pattern.startswith(ml):
output_pattern = ml + output_pattern output_pattern = ml + output_pattern
@ -145,7 +145,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
extra_depends = " ".join(newdeps) extra_depends = " ".join(newdeps)
packages = d.getVar('PACKAGES', True).split() packages = d.getVar('PACKAGES').split()
split_packages = set() split_packages = set()
if postinst: if postinst:
@ -163,7 +163,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
objs.append(relpath) objs.append(relpath)
if extra_depends == None: if extra_depends == None:
extra_depends = d.getVar("PN", True) extra_depends = d.getVar("PN")
if not summary: if not summary:
summary = description summary = description
@ -189,7 +189,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
packages = [pkg] + packages packages = [pkg] + packages
else: else:
packages.append(pkg) packages.append(pkg)
oldfiles = d.getVar('FILES_' + pkg, True) oldfiles = d.getVar('FILES_' + pkg)
newfile = os.path.join(root, o) newfile = os.path.join(root, o)
# These names will be passed through glob() so if the filename actually # These names will be passed through glob() so if the filename actually
# contains * or ? (rare, but possible) we need to handle that specially # contains * or ? (rare, but possible) we need to handle that specially
@ -214,9 +214,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
d.setVar('FILES_' + pkg, oldfiles + " " + newfile) d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
if extra_depends != '': if extra_depends != '':
d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends) d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
if not d.getVar('DESCRIPTION_' + pkg, True): if not d.getVar('DESCRIPTION_' + pkg):
d.setVar('DESCRIPTION_' + pkg, description % on) d.setVar('DESCRIPTION_' + pkg, description % on)
if not d.getVar('SUMMARY_' + pkg, True): if not d.getVar('SUMMARY_' + pkg):
d.setVar('SUMMARY_' + pkg, summary % on) d.setVar('SUMMARY_' + pkg, summary % on)
if postinst: if postinst:
d.setVar('pkg_postinst_' + pkg, postinst) d.setVar('pkg_postinst_' + pkg, postinst)
@ -231,9 +231,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
PACKAGE_DEPENDS += "file-native" PACKAGE_DEPENDS += "file-native"
python () { python () {
if d.getVar('PACKAGES', True) != '': if d.getVar('PACKAGES') != '':
deps = "" deps = ""
for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split(): for dep in (d.getVar('PACKAGE_DEPENDS') or "").split():
deps += " %s:do_populate_sysroot" % dep deps += " %s:do_populate_sysroot" % dep
d.appendVarFlag('do_package', 'depends', deps) d.appendVarFlag('do_package', 'depends', deps)
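
do_split_packages(), whose signature dominates this file, is normally driven from a populate_packages_prepend. A hypothetical recipe fragment showing typical arguments (all names here are invented for illustration):

python populate_packages_prepend () {
    do_split_packages(d, root=d.expand('${libdir}/myapp/plugins'),
                      file_regex=r'^lib(.*)\.so$',
                      output_pattern='myapp-plugin-%s',
                      description='MyApp plugin for %s',
                      extra_depends='')
}
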
@ -286,14 +286,14 @@ def files_from_filevars(filevars):
# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files # Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files
def get_conffiles(pkg, d): def get_conffiles(pkg, d):
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
root = os.path.join(pkgdest, pkg) root = os.path.join(pkgdest, pkg)
cwd = os.getcwd() cwd = os.getcwd()
os.chdir(root) os.chdir(root)
conffiles = d.getVar('CONFFILES_%s' % pkg, True); conffiles = d.getVar('CONFFILES_%s' % pkg);
if conffiles == None: if conffiles == None:
conffiles = d.getVar('CONFFILES', True) conffiles = d.getVar('CONFFILES')
if conffiles == None: if conffiles == None:
conffiles = "" conffiles = ""
conffiles = conffiles.split() conffiles = conffiles.split()
@ -318,7 +318,7 @@ def get_conffiles(pkg, d):
return conf_list return conf_list
def checkbuildpath(file, d): def checkbuildpath(file, d):
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
with open(file) as f: with open(file) as f:
file_content = f.read() file_content = f.read()
if tmpdir in file_content: if tmpdir in file_content:
@ -335,8 +335,8 @@ def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d):
import stat import stat
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
objcopy = d.getVar("OBJCOPY", True) objcopy = d.getVar("OBJCOPY")
debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit") debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
# We ignore kernel modules, we don't generate debug info files. # We ignore kernel modules, we don't generate debug info files.
@ -382,11 +382,11 @@ def copydebugsources(debugsrcdir, d):
sourcefile = d.expand("${WORKDIR}/debugsources.list") sourcefile = d.expand("${WORKDIR}/debugsources.list")
if debugsrcdir and os.path.isfile(sourcefile): if debugsrcdir and os.path.isfile(sourcefile):
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
strip = d.getVar("STRIP", True) strip = d.getVar("STRIP")
objcopy = d.getVar("OBJCOPY", True) objcopy = d.getVar("OBJCOPY")
debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit") debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
workdir = d.getVar("WORKDIR", True) workdir = d.getVar("WORKDIR")
workparentdir = os.path.dirname(os.path.dirname(workdir)) workparentdir = os.path.dirname(os.path.dirname(workdir))
workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir) workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)
@ -468,20 +468,20 @@ def get_package_additional_metadata (pkg_type, d):
return "\n".join(metadata_fields).strip() return "\n".join(metadata_fields).strip()
def runtime_mapping_rename (varname, pkg, d): def runtime_mapping_rename (varname, pkg, d):
#bb.note("%s before: %s" % (varname, d.getVar(varname, True))) #bb.note("%s before: %s" % (varname, d.getVar(varname)))
if bb.data.inherits_class('packagegroup', d): if bb.data.inherits_class('packagegroup', d):
return return
new_depends = {} new_depends = {}
deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "") deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "")
for depend in deps: for depend in deps:
new_depend = get_package_mapping(depend, pkg, d) new_depend = get_package_mapping(depend, pkg, d)
new_depends[new_depend] = deps[depend] new_depends[new_depend] = deps[depend]
d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False)) d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False))
#bb.note("%s after: %s" % (varname, d.getVar(varname, True))) #bb.note("%s after: %s" % (varname, d.getVar(varname)))
# #
# Package functions suitable for inclusion in PACKAGEFUNCS # Package functions suitable for inclusion in PACKAGEFUNCS
@ -492,34 +492,34 @@ python package_get_auto_pr() {
import re import re
# Support per recipe PRSERV_HOST # Support per recipe PRSERV_HOST
pn = d.getVar('PN', True) pn = d.getVar('PN')
host = d.getVar("PRSERV_HOST_" + pn, True) host = d.getVar("PRSERV_HOST_" + pn)
if not (host is None): if not (host is None):
d.setVar("PRSERV_HOST", host) d.setVar("PRSERV_HOST", host)
pkgv = d.getVar("PKGV", True) pkgv = d.getVar("PKGV")
# PR Server not active, handle AUTOINC # PR Server not active, handle AUTOINC
if not d.getVar('PRSERV_HOST', True): if not d.getVar('PRSERV_HOST'):
if 'AUTOINC' in pkgv: if 'AUTOINC' in pkgv:
d.setVar("PKGV", pkgv.replace("AUTOINC", "0")) d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
return return
auto_pr = None auto_pr = None
pv = d.getVar("PV", True) pv = d.getVar("PV")
version = d.getVar("PRAUTOINX", True) version = d.getVar("PRAUTOINX")
pkgarch = d.getVar("PACKAGE_ARCH", True) pkgarch = d.getVar("PACKAGE_ARCH")
checksum = d.getVar("BB_TASKHASH", True) checksum = d.getVar("BB_TASKHASH")
if d.getVar('PRSERV_LOCKDOWN', True): if d.getVar('PRSERV_LOCKDOWN'):
auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None
if auto_pr is None: if auto_pr is None:
bb.fatal("Can NOT get PRAUTO from lockdown exported file") bb.fatal("Can NOT get PRAUTO from lockdown exported file")
d.setVar('PRAUTO',str(auto_pr)) d.setVar('PRAUTO',str(auto_pr))
return return
try: try:
conn = d.getVar("__PRSERV_CONN", True) conn = d.getVar("__PRSERV_CONN")
if conn is None: if conn is None:
conn = oe.prservice.prserv_make_conn(d) conn = oe.prservice.prserv_make_conn(d)
if conn is not None: if conn is not None:
@ -540,19 +540,19 @@ python package_get_auto_pr() {
LOCALEBASEPN ??= "${PN}" LOCALEBASEPN ??= "${PN}"
python package_do_split_locales() { python package_do_split_locales() {
if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'): if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
bb.debug(1, "package requested not splitting locales") bb.debug(1, "package requested not splitting locales")
return return
packages = (d.getVar('PACKAGES', True) or "").split() packages = (d.getVar('PACKAGES') or "").split()
datadir = d.getVar('datadir', True) datadir = d.getVar('datadir')
if not datadir: if not datadir:
bb.note("datadir not defined") bb.note("datadir not defined")
return return
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
pn = d.getVar('LOCALEBASEPN', True) pn = d.getVar('LOCALEBASEPN')
if pn + '-locale' in packages: if pn + '-locale' in packages:
packages.remove(pn + '-locale') packages.remove(pn + '-locale')
@ -565,10 +565,10 @@ python package_do_split_locales() {
locales = os.listdir(localedir) locales = os.listdir(localedir)
summary = d.getVar('SUMMARY', True) or pn summary = d.getVar('SUMMARY') or pn
description = d.getVar('DESCRIPTION', True) or "" description = d.getVar('DESCRIPTION') or ""
locale_section = d.getVar('LOCALE_SECTION', True) locale_section = d.getVar('LOCALE_SECTION')
mlprefix = d.getVar('MLPREFIX', True) or "" mlprefix = d.getVar('MLPREFIX') or ""
for l in sorted(locales): for l in sorted(locales):
ln = legitimize_package_name(l) ln = legitimize_package_name(l)
pkg = pn + '-locale-' + ln pkg = pn + '-locale-' + ln
@ -589,14 +589,14 @@ python package_do_split_locales() {
# glibc-localedata-translit* won't install as a dependency # glibc-localedata-translit* won't install as a dependency
# for some other package which breaks meta-toolchain # for some other package which breaks meta-toolchain
# Probably breaks since virtual-locale- isn't provided anywhere # Probably breaks since virtual-locale- isn't provided anywhere
#rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split() #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split()
#rdep.append('%s-locale*' % pn) #rdep.append('%s-locale*' % pn)
#d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep)) #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))
} }
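
Each locale directory becomes its own package named <LOCALEBASEPN>-locale-<ln>, where ln is the sanitised locale name. A stand-in for that naming (legitimize_package_name() does more in reality):

def legitimize(name):
    # simplified stand-in for legitimize_package_name()
    return name.lower().replace('_', '-').replace('@', '+')

pn = "glibc"                                # hypothetical LOCALEBASEPN
for l in ["en_GB", "de_DE"]:                # hypothetical locale dirs
    print(pn + "-locale-" + legitimize(l))  # glibc-locale-en-gb, glibc-locale-de-de
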
python perform_packagecopy () { python perform_packagecopy () {
dest = d.getVar('D', True) dest = d.getVar('D')
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
# Start by package population by taking a copy of the installed # Start by package population by taking a copy of the installed
# files to operate on # files to operate on
@ -730,8 +730,8 @@ python fixup_perms () {
# paths are resolved via BBPATH # paths are resolved via BBPATH
def get_fs_perms_list(d): def get_fs_perms_list(d):
str = "" str = ""
bbpath = d.getVar('BBPATH', True) bbpath = d.getVar('BBPATH')
fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True) fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES')
if not fs_perms_tables: if not fs_perms_tables:
fs_perms_tables = 'files/fs-perms.txt' fs_perms_tables = 'files/fs-perms.txt'
for conf_file in fs_perms_tables.split(): for conf_file in fs_perms_tables.split():
@ -740,7 +740,7 @@ python fixup_perms () {
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
fs_perms_table = {} fs_perms_table = {}
fs_link_table = {} fs_link_table = {}
@ -769,7 +769,7 @@ python fixup_perms () {
'oldincludedir' ] 'oldincludedir' ]
for path in target_path_vars: for path in target_path_vars:
dir = d.getVar(path, True) or "" dir = d.getVar(path) or ""
if dir == "": if dir == "":
continue continue
fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d)) fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d))
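
The expansion above fixes the 8-field shape every fs-perms entry uses: path, mode, uid, gid, then a walk flag and the mode/uid/gid applied to files while walking. Parsing one such line:

entry = "/usr/bin 0755 root root false - - -"   # same shape as the default above
path, mode, uid, gid, walk, fmode, fuid, fgid = entry.split()
print(path, mode, walk)                         # '-' fields mean leave unchanged
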
@ -854,20 +854,20 @@ python fixup_perms () {
python split_and_strip_files () { python split_and_strip_files () {
import stat, errno import stat, errno
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
pn = d.getVar('PN', True) pn = d.getVar('PN')
oldcwd = os.getcwd() oldcwd = os.getcwd()
os.chdir(dvar) os.chdir(dvar)
# We default to '.debug' style # We default to '.debug' style
if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory': if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
# Single debug-file-directory style debug info # Single debug-file-directory style debug info
debugappend = ".debug" debugappend = ".debug"
debugdir = "" debugdir = ""
debuglibdir = "/usr/lib/debug" debuglibdir = "/usr/lib/debug"
debugsrcdir = "/usr/src/debug" debugsrcdir = "/usr/src/debug"
elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src': elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
# Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
debugappend = "" debugappend = ""
debugdir = "/.debug" debugdir = "/.debug"
@ -918,10 +918,10 @@ python split_and_strip_files () {
symlinks = {} symlinks = {}
kernmods = [] kernmods = []
inodes = {} inodes = {}
libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True)) libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True)) baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1' or \ if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'): d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
for root, dirs, files in cpath.walk(dvar): for root, dirs, files in cpath.walk(dvar):
for f in files: for f in files:
file = os.path.join(root, f) file = os.path.join(root, f)
@ -962,7 +962,7 @@ python split_and_strip_files () {
elf_file = isELF(file) elf_file = isELF(file)
if elf_file & 1: if elf_file & 1:
if elf_file & 2: if elf_file & 2:
if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split(): if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
else: else:
msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
@ -991,7 +991,7 @@ python split_and_strip_files () {
# #
# First let's process debug splitting # First let's process debug splitting
# #
if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'): if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
for file in elffiles: for file in elffiles:
src = file[len(dvar):] src = file[len(dvar):]
dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
@ -1054,8 +1054,8 @@ python split_and_strip_files () {
# #
# Now let's go back over things and strip them # Now let's go back over things and strip them
# #
if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'): if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
strip = d.getVar("STRIP", True) strip = d.getVar("STRIP")
sfiles = [] sfiles = []
for file in elffiles: for file in elffiles:
elf_file = int(elffiles[file]) elf_file = int(elffiles[file])
@ -1075,16 +1075,16 @@ python split_and_strip_files () {
python populate_packages () { python populate_packages () {
import glob, re import glob, re
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
outdir = d.getVar('DEPLOY_DIR', True) outdir = d.getVar('DEPLOY_DIR')
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
pn = d.getVar('PN', True) pn = d.getVar('PN')
bb.utils.mkdirhier(outdir) bb.utils.mkdirhier(outdir)
os.chdir(dvar) os.chdir(dvar)
autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG", True) or False) autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)
# Sanity check PACKAGES for duplicates # Sanity check PACKAGES for duplicates
# Sanity should be moved to sanity.bbclass once we have the infrastructure # Sanity should be moved to sanity.bbclass once we have the infrastructure
@ -1099,7 +1099,7 @@ python populate_packages () {
else: else:
package_list.append(pkg) package_list.append(pkg)
d.setVar('PACKAGES', ' '.join(package_list)) d.setVar('PACKAGES', ' '.join(package_list))
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
seen = [] seen = []
@ -1120,7 +1120,7 @@ python populate_packages () {
root = os.path.join(pkgdest, pkg) root = os.path.join(pkgdest, pkg)
bb.utils.mkdirhier(root) bb.utils.mkdirhier(root)
filesvar = d.getVar('FILES_%s' % pkg, True) or "" filesvar = d.getVar('FILES_%s' % pkg) or ""
if "//" in filesvar: if "//" in filesvar:
msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
package_qa_handle_error("files-invalid", msg, d) package_qa_handle_error("files-invalid", msg, d)
@ -1188,7 +1188,7 @@ python populate_packages () {
# Handle LICENSE_EXCLUSION # Handle LICENSE_EXCLUSION
package_list = [] package_list = []
for pkg in packages.split(): for pkg in packages.split():
if d.getVar('LICENSE_EXCLUSION-' + pkg, True): if d.getVar('LICENSE_EXCLUSION-' + pkg):
msg = "%s has an incompatible license. Excluding from packaging." % pkg msg = "%s has an incompatible license. Excluding from packaging." % pkg
package_qa_handle_error("incompatible-license", msg, d) package_qa_handle_error("incompatible-license", msg, d)
else: else:
@ -1207,7 +1207,7 @@ python populate_packages () {
if unshipped != []: if unshipped != []:
msg = pn + ": Files/directories were installed but not shipped in any package:" msg = pn + ": Files/directories were installed but not shipped in any package:"
if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split(): if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split():
bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
else: else:
for f in unshipped: for f in unshipped:
@ -1220,7 +1220,7 @@ populate_packages[dirs] = "${D}"
python package_fixsymlinks () { python package_fixsymlinks () {
import errno import errno
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
packages = d.getVar("PACKAGES", False).split() packages = d.getVar("PACKAGES", False).split()
dangling_links = {} dangling_links = {}
@ -1255,7 +1255,7 @@ python package_fixsymlinks () {
bb.note("%s contains dangling symlink to %s" % (pkg, l)) bb.note("%s contains dangling symlink to %s" % (pkg, l))
for pkg in newrdepends: for pkg in newrdepends:
rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "") rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
for p in newrdepends[pkg]: for p in newrdepends[pkg]:
if p not in rdepends: if p not in rdepends:
rdepends[p] = [] rdepends[p] = []
@ -1309,9 +1309,9 @@ python emit_pkgdata() {
with open(subdata_file, 'w') as fd: with open(subdata_file, 'w') as fd:
fd.write("PKG_%s: %s" % (ml_pkg, pkg)) fd.write("PKG_%s: %s" % (ml_pkg, pkg))
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
pkgdatadir = d.getVar('PKGDESTWORK', True) pkgdatadir = d.getVar('PKGDESTWORK')
# Take shared lock since we're only reading, not writing # Take shared lock since we're only reading, not writing
lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True) lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
@ -1321,9 +1321,9 @@ python emit_pkgdata() {
f.write("PACKAGES: %s\n" % packages) f.write("PACKAGES: %s\n" % packages)
f.close() f.close()
pn = d.getVar('PN', True) pn = d.getVar('PN')
global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split() global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split() variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
write_extra_pkgs(variants, pn, packages, pkgdatadir) write_extra_pkgs(variants, pn, packages, pkgdatadir)
@ -1331,10 +1331,10 @@ python emit_pkgdata() {
if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)): if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)):
write_extra_pkgs(global_variants, pn, packages, pkgdatadir) write_extra_pkgs(global_variants, pn, packages, pkgdatadir)
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
for pkg in packages.split(): for pkg in packages.split():
pkgval = d.getVar('PKG_%s' % pkg, True) pkgval = d.getVar('PKG_%s' % pkg)
if pkgval is None: if pkgval is None:
pkgval = pkg pkgval = pkg
d.setVar('PKG_%s' % pkg, pkg) d.setVar('PKG_%s' % pkg, pkg)
@ -1377,11 +1377,11 @@ python emit_pkgdata() {
write_if_exists(sf, pkg, 'pkg_prerm') write_if_exists(sf, pkg, 'pkg_prerm')
write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
write_if_exists(sf, pkg, 'FILES_INFO') write_if_exists(sf, pkg, 'FILES_INFO')
for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split(): for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile) write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split(): for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile) write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size)) sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
@ -1394,9 +1394,9 @@ python emit_pkgdata() {
bb.utils.mkdirhier(os.path.dirname(subdata_sym)) bb.utils.mkdirhier(os.path.dirname(subdata_sym))
oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True) oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)
allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True) allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg)
if not allow_empty: if not allow_empty:
allow_empty = d.getVar('ALLOW_EMPTY', True) allow_empty = d.getVar('ALLOW_EMPTY')
root = "%s/%s" % (pkgdest, pkg) root = "%s/%s" % (pkgdest, pkg)
os.chdir(root) os.chdir(root)
g = glob('*') g = glob('*')
@ -1435,19 +1435,19 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI
# FILERDEPENDS_filepath_pkg - per file dep # FILERDEPENDS_filepath_pkg - per file dep
python package_do_filedeps() { python package_do_filedeps() {
if d.getVar('SKIP_FILEDEPS', True) == '1': if d.getVar('SKIP_FILEDEPS') == '1':
return return
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
rpmdeps = d.getVar('RPMDEPS', True) rpmdeps = d.getVar('RPMDEPS')
def chunks(files, n): def chunks(files, n):
return [files[i:i+n] for i in range(0, len(files), n)] return [files[i:i+n] for i in range(0, len(files), n)]
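
chunks() batches the file list so each rpmdeps invocation stays under command-line length limits. It is self-contained and easy to verify:

def chunks(files, n):
    return [files[i:i+n] for i in range(0, len(files), n)]

print(chunks(list("abcdefg"), 3))   # [['a', 'b', 'c'], ['d', 'e', 'f'], ['g']]
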
pkglist = [] pkglist = []
for pkg in packages.split(): for pkg in packages.split():
if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1': if d.getVar('SKIP_FILEDEPS_' + pkg) == '1':
continue continue
if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'): if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'):
continue continue
@ -1496,22 +1496,22 @@ python package_do_shlibs() {
return return
lib_re = re.compile("^.*\.so") lib_re = re.compile("^.*\.so")
libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True)) libdir_re = re.compile(".*/%s$" % d.getVar('baselib'))
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
targetos = d.getVar('TARGET_OS', True) targetos = d.getVar('TARGET_OS')
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
ver = d.getVar('PKGV', True) ver = d.getVar('PKGV')
if not ver: if not ver:
msg = "PKGV not defined" msg = "PKGV not defined"
package_qa_handle_error("pkgv-undefined", msg, d) package_qa_handle_error("pkgv-undefined", msg, d)
return return
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) shlibswork_dir = d.getVar('SHLIBSWORKDIR')
# Take shared lock since we're only reading, not writing # Take shared lock since we're only reading, not writing
lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}")) lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
@ -1519,7 +1519,7 @@ python package_do_shlibs() {
def linux_so(file, needed, sonames, renames, pkgver): def linux_so(file, needed, sonames, renames, pkgver):
needs_ldconfig = False needs_ldconfig = False
ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null" cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
fd = os.popen(cmd) fd = os.popen(cmd)
lines = fd.readlines() lines = fd.readlines()
fd.close() fd.close()
@ -1601,12 +1601,12 @@ python package_do_shlibs() {
if name and name not in needed[pkg]: if name and name not in needed[pkg]:
needed[pkg].append((name, file, [])) needed[pkg].append((name, file, []))
if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1": if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1":
snap_symlinks = True snap_symlinks = True
else: else:
snap_symlinks = False snap_symlinks = False
if (d.getVar('USE_LDCONFIG', True) or "1") == "1": if (d.getVar('USE_LDCONFIG') or "1") == "1":
use_ldconfig = True use_ldconfig = True
else: else:
use_ldconfig = False use_ldconfig = False
@ -1615,14 +1615,14 @@ python package_do_shlibs() {
shlib_provider = oe.package.read_shlib_providers(d) shlib_provider = oe.package.read_shlib_providers(d)
for pkg in packages.split(): for pkg in packages.split():
private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or "" private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
private_libs = private_libs.split() private_libs = private_libs.split()
needs_ldconfig = False needs_ldconfig = False
bb.debug(2, "calculating shlib provides for %s" % pkg) bb.debug(2, "calculating shlib provides for %s" % pkg)
pkgver = d.getVar('PKGV_' + pkg, True) pkgver = d.getVar('PKGV_' + pkg)
if not pkgver: if not pkgver:
pkgver = d.getVar('PV_' + pkg, True) pkgver = d.getVar('PV_' + pkg)
if not pkgver: if not pkgver:
pkgver = ver pkgver = ver
@ -1659,18 +1659,18 @@ python package_do_shlibs() {
fd.close() fd.close()
if needs_ldconfig and use_ldconfig: if needs_ldconfig and use_ldconfig:
bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
postinst = d.getVar('pkg_postinst_%s' % pkg, True) postinst = d.getVar('pkg_postinst_%s' % pkg)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += d.getVar('ldconfig_postinst_fragment', True) postinst += d.getVar('ldconfig_postinst_fragment')
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))
bb.utils.unlockfile(lf) bb.utils.unlockfile(lf)
assumed_libs = d.getVar('ASSUME_SHLIBS', True) assumed_libs = d.getVar('ASSUME_SHLIBS')
if assumed_libs: if assumed_libs:
libdir = d.getVar("libdir", True) libdir = d.getVar("libdir")
for e in assumed_libs.split(): for e in assumed_libs.split():
l, dep_pkg = e.split(":") l, dep_pkg = e.split(":")
lib_ver = None lib_ver = None
@ -1682,7 +1682,7 @@ python package_do_shlibs() {
shlib_provider[l] = {} shlib_provider[l] = {}
shlib_provider[l][libdir] = (dep_pkg, lib_ver) shlib_provider[l][libdir] = (dep_pkg, lib_ver)
libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)] libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]
for pkg in packages.split(): for pkg in packages.split():
bb.debug(2, "calculating shlib requirements for %s" % pkg) bb.debug(2, "calculating shlib requirements for %s" % pkg)
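ASSUME_SHLIBS, read a few lines up, lets a build declare shared-library providers that are not built from source. Each whitespace-separated entry is split on ':' into a soname and the providing package, and lands in shlib_provider keyed by the configured libdir. With a made-up entry:

    # ASSUME_SHLIBS = "libEGL.so.1:libegl-vendor"
    # after the loop above:
    #   shlib_provider['libEGL.so.1'][libdir] == ('libegl-vendor', None)
    # (lib_ver defaults to None; the version-parsing branch is elided in this hunk)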
@ -1736,12 +1736,12 @@ python package_do_shlibs() {
python package_do_pkgconfig () { python package_do_pkgconfig () {
import re import re
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
shlibs_dirs = d.getVar('SHLIBSDIRS', True).split() shlibs_dirs = d.getVar('SHLIBSDIRS').split()
shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) shlibswork_dir = d.getVar('SHLIBSWORKDIR')
pc_re = re.compile('(.*)\.pc$') pc_re = re.compile('(.*)\.pc$')
var_re = re.compile('(.*)=(.*)') var_re = re.compile('(.*)=(.*)')
@ -1826,7 +1826,7 @@ python package_do_pkgconfig () {
def read_libdep_files(d): def read_libdep_files(d):
pkglibdeps = {} pkglibdeps = {}
packages = d.getVar('PACKAGES', True).split() packages = d.getVar('PACKAGES').split()
for pkg in packages: for pkg in packages:
pkglibdeps[pkg] = {} pkglibdeps[pkg] = {}
for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
@ -1846,9 +1846,9 @@ def read_libdep_files(d):
python read_shlibdeps () { python read_shlibdeps () {
pkglibdeps = read_libdep_files(d) pkglibdeps = read_libdep_files(d)
packages = d.getVar('PACKAGES', True).split() packages = d.getVar('PACKAGES').split()
for pkg in packages: for pkg in packages:
rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "") rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
for dep in pkglibdeps[pkg]: for dep in pkglibdeps[pkg]:
# Add the dep if it's not already there, or if no comparison is set # Add the dep if it's not already there, or if no comparison is set
if dep not in rdepends: if dep not in rdepends:
@ -1873,14 +1873,14 @@ python package_depchains() {
package. package.
""" """
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split() postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split() prefixes = (d.getVar('DEPCHAIN_PRE') or '').split()
def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):
#bb.note('depends for %s is %s' % (base, depends)) #bb.note('depends for %s is %s' % (base, depends))
rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "") rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
for depend in depends: for depend in depends:
if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'):
@ -1901,7 +1901,7 @@ python package_depchains() {
def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
#bb.note('rdepends for %s is %s' % (base, rdepends)) #bb.note('rdepends for %s is %s' % (base, rdepends))
rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "") rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")
for depend in rdepends: for depend in rdepends:
if depend.find('virtual-locale-') != -1: if depend.find('virtual-locale-') != -1:
@ -1924,12 +1924,12 @@ python package_depchains() {
list.append(dep) list.append(dep)
depends = [] depends = []
for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""): for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
add_dep(depends, dep) add_dep(depends, dep)
rdepends = [] rdepends = []
for pkg in packages.split(): for pkg in packages.split():
for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""): for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""):
add_dep(rdepends, dep) add_dep(rdepends, dep)
#bb.note('rdepends is %s' % rdepends) #bb.note('rdepends is %s' % rdepends)
@ -1959,7 +1959,7 @@ python package_depchains() {
for pkg in pkglibdeps: for pkg in pkglibdeps:
for k in pkglibdeps[pkg]: for k in pkglibdeps[pkg]:
add_dep(pkglibdeplist, k) add_dep(pkglibdeplist, k)
dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (bb.data.inherits_class('packagegroup', d))) dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))
for suffix in pkgs: for suffix in pkgs:
for pkg in pkgs[suffix]: for pkg in pkgs[suffix]:
@ -1976,7 +1976,7 @@ python package_depchains() {
pkg_addrrecs(pkg, base, suffix, func, rdepends, d) pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
else: else:
rdeps = [] rdeps = []
for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""): for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""):
add_dep(rdeps, dep) add_dep(rdeps, dep)
pkg_addrrecs(pkg, base, suffix, func, rdeps, d) pkg_addrrecs(pkg, base, suffix, func, rdeps, d)
} }
@ -1987,8 +1987,8 @@ PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDE
def gen_packagevar(d): def gen_packagevar(d):
ret = [] ret = []
pkgs = (d.getVar("PACKAGES", True) or "").split() pkgs = (d.getVar("PACKAGES") or "").split()
vars = (d.getVar("PACKAGEVARS", True) or "").split() vars = (d.getVar("PACKAGEVARS") or "").split()
for p in pkgs: for p in pkgs:
for v in vars: for v in vars:
ret.append(v + "_" + p) ret.append(v + "_" + p)
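gen_packagevar() simply crosses PACKAGES with PACKAGEVARS to enumerate every per-package variable name. With hypothetical values:

    # PACKAGES    = "foo foo-dev"
    # PACKAGEVARS = "FILES RDEPENDS"
    # gen_packagevar(d) -> ['FILES_foo', 'RDEPENDS_foo',
    #                       'FILES_foo-dev', 'RDEPENDS_foo-dev']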
@ -2036,16 +2036,16 @@ python do_package () {
# Sanity test the setup # Sanity test the setup
########################################################################### ###########################################################################
packages = (d.getVar('PACKAGES', True) or "").split() packages = (d.getVar('PACKAGES') or "").split()
if len(packages) < 1: if len(packages) < 1:
bb.debug(1, "No packages to build, skipping do_package") bb.debug(1, "No packages to build, skipping do_package")
return return
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
outdir = d.getVar('DEPLOY_DIR', True) outdir = d.getVar('DEPLOY_DIR')
dest = d.getVar('D', True) dest = d.getVar('D')
dvar = d.getVar('PKGD', True) dvar = d.getVar('PKGD')
pn = d.getVar('PN', True) pn = d.getVar('PN')
if not workdir or not outdir or not dest or not dvar or not pn: if not workdir or not outdir or not dest or not dvar or not pn:
msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
@ -2063,7 +2063,7 @@ python do_package () {
# code pre-expands some frequently used variables # code pre-expands some frequently used variables
def expandVar(x, d): def expandVar(x, d):
d.setVar(x, d.getVar(x, True)) d.setVar(x, d.getVar(x))
for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO': for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
expandVar(x, d) expandVar(x, d)
@ -2072,7 +2072,7 @@ python do_package () {
# Setup PKGD (from D) # Setup PKGD (from D)
########################################################################### ###########################################################################
for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split(): for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
bb.build.exec_func(f, d) bb.build.exec_func(f, d)
########################################################################### ###########################################################################
@ -2081,7 +2081,7 @@ python do_package () {
cpath = oe.cachedpath.CachedPath() cpath = oe.cachedpath.CachedPath()
for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split(): for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
bb.build.exec_func(f, d) bb.build.exec_func(f, d)
########################################################################### ###########################################################################
@ -2091,18 +2091,18 @@ python do_package () {
# Build global list of files in each split package # Build global list of files in each split package
global pkgfiles global pkgfiles
pkgfiles = {} pkgfiles = {}
packages = d.getVar('PACKAGES', True).split() packages = d.getVar('PACKAGES').split()
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
for pkg in packages: for pkg in packages:
pkgfiles[pkg] = [] pkgfiles[pkg] = []
for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg): for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
for file in files: for file in files:
pkgfiles[pkg].append(walkroot + os.sep + file) pkgfiles[pkg].append(walkroot + os.sep + file)
for f in (d.getVar('PACKAGEFUNCS', True) or '').split(): for f in (d.getVar('PACKAGEFUNCS') or '').split():
bb.build.exec_func(f, d) bb.build.exec_func(f, d)
qa_sane = d.getVar("QA_SANE", True) qa_sane = d.getVar("QA_SANE")
if not qa_sane: if not qa_sane:
bb.fatal("Fatal QA errors found, failing task.") bb.fatal("Fatal QA errors found, failing task.")
} }
@ -2149,7 +2149,7 @@ def mapping_rename_hook(d):
Rewrite variables to account for package renaming in things Rewrite variables to account for package renaming in things
like debian.bbclass or manual PKG variable name changes like debian.bbclass or manual PKG variable name changes
""" """
pkg = d.getVar("PKG", True) pkg = d.getVar("PKG")
runtime_mapping_rename("RDEPENDS", pkg, d) runtime_mapping_rename("RDEPENDS", pkg, d)
runtime_mapping_rename("RRECOMMENDS", pkg, d) runtime_mapping_rename("RRECOMMENDS", pkg, d)
runtime_mapping_rename("RSUGGESTS", pkg, d) runtime_mapping_rename("RSUGGESTS", pkg, d)
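Every hunk in this file follows the same shape: the second positional argument to getVar() is dropped and the expanded lookup behaviour is unchanged. A minimal sketch of the resulting call pattern, assuming bitbake's datastore is importable stand-alone:

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar('PN', 'example')
    d.setVar('BPN', '${PN}')

    assert d.getVar('BPN') == 'example'        # expansion is the default
    assert d.getVar('BPN', False) == '${PN}'   # raw value still available on request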
@ -6,14 +6,14 @@ inherit package
IMAGE_PKGTYPE ?= "deb" IMAGE_PKGTYPE ?= "deb"
DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH', True), d.getVar('TUNE_FEATURES', True))}" DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'))}"
DPKG_ARCH[vardepvalue] = "${DPKG_ARCH}" DPKG_ARCH[vardepvalue] = "${DPKG_ARCH}"
PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
APTCONF_TARGET = "${WORKDIR}" APTCONF_TARGET = "${WORKDIR}"
APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}" APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
def debian_arch_map(arch, tune): def debian_arch_map(arch, tune):
tune_features = tune.split() tune_features = tune.split()
@ -56,22 +56,22 @@ python do_package_deb () {
oldcwd = os.getcwd() oldcwd = os.getcwd()
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to package") bb.error("WORKDIR not defined, unable to package")
return return
outdir = d.getVar('PKGWRITEDIRDEB', True) outdir = d.getVar('PKGWRITEDIRDEB')
if not outdir: if not outdir:
bb.error("PKGWRITEDIRDEB not defined, unable to package") bb.error("PKGWRITEDIRDEB not defined, unable to package")
return return
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
if not packages: if not packages:
bb.debug(1, "PACKAGES not defined, nothing to package") bb.debug(1, "PACKAGES not defined, nothing to package")
return return
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@ -80,7 +80,7 @@ python do_package_deb () {
bb.debug(1, "No packages; nothing to do") bb.debug(1, "No packages; nothing to do")
return return
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
def cleanupcontrol(root): def cleanupcontrol(root):
for p in ['CONTROL', 'DEBIAN']: for p in ['CONTROL', 'DEBIAN']:
@ -96,7 +96,7 @@ python do_package_deb () {
localdata.setVar('ROOT', '') localdata.setVar('ROOT', '')
localdata.setVar('ROOT_%s' % pkg, root) localdata.setVar('ROOT_%s' % pkg, root)
pkgname = localdata.getVar('PKG_%s' % pkg, True) pkgname = localdata.getVar('PKG_%s' % pkg)
if not pkgname: if not pkgname:
pkgname = pkg pkgname = pkg
localdata.setVar('PKG', pkgname) localdata.setVar('PKG', pkgname)
@ -106,7 +106,7 @@ python do_package_deb () {
bb.data.update_data(localdata) bb.data.update_data(localdata)
basedir = os.path.join(os.path.dirname(root)) basedir = os.path.join(os.path.dirname(root))
pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True)) pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH'))
bb.utils.mkdirhier(pkgoutdir) bb.utils.mkdirhier(pkgoutdir)
os.chdir(root) os.chdir(root)
@ -114,7 +114,7 @@ python do_package_deb () {
from glob import glob from glob import glob
g = glob('*') g = glob('*')
if not g and localdata.getVar('ALLOW_EMPTY', False) != "1": if not g and localdata.getVar('ALLOW_EMPTY', False) != "1":
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
bb.utils.unlockfile(lf) bb.utils.unlockfile(lf)
continue continue
@ -129,7 +129,7 @@ python do_package_deb () {
bb.fatal("unable to open control file for writing") bb.fatal("unable to open control file for writing")
fields = [] fields = []
pe = d.getVar('PKGE', True) pe = d.getVar('PKGE')
if pe and int(pe) > 0: if pe and int(pe) > 0:
fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
else: else:
@ -141,7 +141,7 @@ python do_package_deb () {
fields.append(["Architecture: %s\n", ['DPKG_ARCH']]) fields.append(["Architecture: %s\n", ['DPKG_ARCH']])
fields.append(["OE: %s\n", ['PN']]) fields.append(["OE: %s\n", ['PN']])
fields.append(["PackageArch: %s\n", ['PACKAGE_ARCH']]) fields.append(["PackageArch: %s\n", ['PACKAGE_ARCH']])
if d.getVar('HOMEPAGE', True): if d.getVar('HOMEPAGE'):
fields.append(["Homepage: %s\n", ['HOMEPAGE']]) fields.append(["Homepage: %s\n", ['HOMEPAGE']])
# Package, Version, Maintainer, Description - mandatory # Package, Version, Maintainer, Description - mandatory
@ -151,10 +151,10 @@ python do_package_deb () {
def pullData(l, d): def pullData(l, d):
l2 = [] l2 = []
for i in l: for i in l:
data = d.getVar(i, True) data = d.getVar(i)
if data is None: if data is None:
raise KeyError(f) raise KeyError(f)
if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH', True) == 'all': if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH') == 'all':
data = 'all' data = 'all'
elif i == 'PACKAGE_ARCH' or i == 'DPKG_ARCH': elif i == 'PACKAGE_ARCH' or i == 'DPKG_ARCH':
# The params in deb package control don't allow character # The params in deb package control don't allow character
@ -165,7 +165,7 @@ python do_package_deb () {
return l2 return l2
ctrlfile.write("Package: %s\n" % pkgname) ctrlfile.write("Package: %s\n" % pkgname)
if d.getVar('PACKAGE_ARCH', True) == "all": if d.getVar('PACKAGE_ARCH') == "all":
ctrlfile.write("Multi-Arch: foreign\n") ctrlfile.write("Multi-Arch: foreign\n")
# check for required fields # check for required fields
try: try:
@ -175,9 +175,9 @@ python do_package_deb () {
raise KeyError(f) raise KeyError(f)
# Special behavior for description... # Special behavior for description...
if 'DESCRIPTION' in fs: if 'DESCRIPTION' in fs:
summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "."
ctrlfile.write('Description: %s\n' % summary) ctrlfile.write('Description: %s\n' % summary)
description = localdata.getVar('DESCRIPTION', True) or "." description = localdata.getVar('DESCRIPTION') or "."
description = textwrap.dedent(description).strip() description = textwrap.dedent(description).strip()
if '\\n' in description: if '\\n' in description:
# Manually indent # Manually indent
@ -231,7 +231,7 @@ python do_package_deb () {
elif (v or "").startswith("> "): elif (v or "").startswith("> "):
var[dep][i] = var[dep][i].replace("> ", ">> ") var[dep][i] = var[dep][i].replace("> ", ">> ")
rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "") rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
debian_cmp_remap(rdepends) debian_cmp_remap(rdepends)
for dep in list(rdepends.keys()): for dep in list(rdepends.keys()):
if dep == pkg: if dep == pkg:
@ -239,20 +239,20 @@ python do_package_deb () {
continue continue
if '*' in dep: if '*' in dep:
del rdepends[dep] del rdepends[dep]
rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "") rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "")
debian_cmp_remap(rrecommends) debian_cmp_remap(rrecommends)
for dep in list(rrecommends.keys()): for dep in list(rrecommends.keys()):
if '*' in dep: if '*' in dep:
del rrecommends[dep] del rrecommends[dep]
rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "") rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "")
debian_cmp_remap(rsuggests) debian_cmp_remap(rsuggests)
# Deliberately drop version information here, not wanted/supported by deb # Deliberately drop version information here, not wanted/supported by deb
rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), []) rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), [])
rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0])) rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0]))
debian_cmp_remap(rprovides) debian_cmp_remap(rprovides)
rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "") rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "")
debian_cmp_remap(rreplaces) debian_cmp_remap(rreplaces)
rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "")
debian_cmp_remap(rconflicts) debian_cmp_remap(rconflicts)
if rdepends: if rdepends:
ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends))
@ -269,7 +269,7 @@ python do_package_deb () {
ctrlfile.close() ctrlfile.close()
for script in ["preinst", "postinst", "prerm", "postrm"]: for script in ["preinst", "postinst", "prerm", "postrm"]:
scriptvar = localdata.getVar('pkg_%s' % script, True) scriptvar = localdata.getVar('pkg_%s' % script)
if not scriptvar: if not scriptvar:
continue continue
scriptvar = scriptvar.strip() scriptvar = scriptvar.strip()
@ -308,7 +308,7 @@ python do_package_deb () {
conffiles.close() conffiles.close()
os.chdir(basedir) os.chdir(basedir)
ret = subprocess.call("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH", True), root, pkgoutdir), shell=True) ret = subprocess.call("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH"), root, pkgoutdir), shell=True)
if ret != 0: if ret != 0:
bb.utils.unlockfile(lf) bb.utils.unlockfile(lf)
bb.fatal("dpkg-deb execution failed") bb.fatal("dpkg-deb execution failed")
@ -328,7 +328,7 @@ do_package_write_deb[sstate-inputdirs] = "${PKGWRITEDIRDEB}"
do_package_write_deb[sstate-outputdirs] = "${DEPLOY_DIR_DEB}" do_package_write_deb[sstate-outputdirs] = "${DEPLOY_DIR_DEB}"
python do_package_write_deb_setscene () { python do_package_write_deb_setscene () {
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK):
os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"))
@ -338,7 +338,7 @@ python do_package_write_deb_setscene () {
addtask do_package_write_deb_setscene addtask do_package_write_deb_setscene
python () { python () {
if d.getVar('PACKAGES', True) != '': if d.getVar('PACKAGES') != '':
deps = ' dpkg-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' deps = ' dpkg-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
d.appendVarFlag('do_package_write_deb', 'depends', deps) d.appendVarFlag('do_package_write_deb', 'depends', deps)
d.setVarFlag('do_package_write_deb', 'fakeroot', "1") d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
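Both the deb writer above and the ipk writer below funnel runtime dependency strings through bb.utils.explode_dep_versions2(), remap OE's comparison operators to Debian's (the '> ' to '>> ' rewrites), and serialise the result with bb.utils.join_deps(). The round trip, with made-up package names:

    import bb.utils

    rdepends = bb.utils.explode_dep_versions2("libfoo (>= 1.2) bar")
    # OrderedDict([('libfoo', ['>= 1.2']), ('bar', [])])

    rdepends['baz'] = []                  # add an unversioned dependency
    bb.utils.join_deps(rdepends)          # "libfoo (>= 1.2), bar, baz"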
@ -11,8 +11,8 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
OPKGBUILDCMD ??= "opkg-build" OPKGBUILDCMD ??= "opkg-build"
OPKG_ARGS += "--force_postinstall --prefer-arch-to-version" OPKG_ARGS += "--force_postinstall --prefer-arch-to-version"
OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}" OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}"
OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE', True) or "").split())][(d.getVar("PACKAGE_EXCLUDE", True) or "") != ""]}" OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE') or "").split())][(d.getVar("PACKAGE_EXCLUDE") or "") != ""]}"
OPKGLIBDIR = "${localstatedir}/lib" OPKGLIBDIR = "${localstatedir}/lib"
@ -24,15 +24,15 @@ python do_package_ipk () {
oldcwd = os.getcwd() oldcwd = os.getcwd()
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
outdir = d.getVar('PKGWRITEDIRIPK', True) outdir = d.getVar('PKGWRITEDIRIPK')
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
if not workdir or not outdir or not tmpdir: if not workdir or not outdir or not tmpdir:
bb.error("Variables incorrectly set, unable to package") bb.error("Variables incorrectly set, unable to package")
return return
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
if not packages or packages == '': if not packages or packages == '':
bb.debug(1, "No packages; nothing to do") bb.debug(1, "No packages; nothing to do")
return return
@ -56,7 +56,7 @@ python do_package_ipk () {
localdata.setVar('ROOT', '') localdata.setVar('ROOT', '')
localdata.setVar('ROOT_%s' % pkg, root) localdata.setVar('ROOT_%s' % pkg, root)
pkgname = localdata.getVar('PKG_%s' % pkg, True) pkgname = localdata.getVar('PKG_%s' % pkg)
if not pkgname: if not pkgname:
pkgname = pkg pkgname = pkg
localdata.setVar('PKG', pkgname) localdata.setVar('PKG', pkgname)
@ -65,7 +65,7 @@ python do_package_ipk () {
bb.data.update_data(localdata) bb.data.update_data(localdata)
basedir = os.path.join(os.path.dirname(root)) basedir = os.path.join(os.path.dirname(root))
arch = localdata.getVar('PACKAGE_ARCH', True) arch = localdata.getVar('PACKAGE_ARCH')
if localdata.getVar('IPK_HIERARCHICAL_FEED', False) == "1": if localdata.getVar('IPK_HIERARCHICAL_FEED', False) == "1":
# Spread packages across subdirectories so each isn't too crowded # Spread packages across subdirectories so each isn't too crowded
@ -98,7 +98,7 @@ python do_package_ipk () {
from glob import glob from glob import glob
g = glob('*') g = glob('*')
if not g and localdata.getVar('ALLOW_EMPTY', False) != "1": if not g and localdata.getVar('ALLOW_EMPTY', False) != "1":
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
bb.utils.unlockfile(lf) bb.utils.unlockfile(lf)
continue continue
@ -111,7 +111,7 @@ python do_package_ipk () {
bb.fatal("unable to open control file for writing") bb.fatal("unable to open control file for writing")
fields = [] fields = []
pe = d.getVar('PKGE', True) pe = d.getVar('PKGE')
if pe and int(pe) > 0: if pe and int(pe) > 0:
fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']])
else: else:
@ -123,13 +123,13 @@ python do_package_ipk () {
fields.append(["License: %s\n", ['LICENSE']]) fields.append(["License: %s\n", ['LICENSE']])
fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']]) fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']])
fields.append(["OE: %s\n", ['PN']]) fields.append(["OE: %s\n", ['PN']])
if d.getVar('HOMEPAGE', True): if d.getVar('HOMEPAGE'):
fields.append(["Homepage: %s\n", ['HOMEPAGE']]) fields.append(["Homepage: %s\n", ['HOMEPAGE']])
def pullData(l, d): def pullData(l, d):
l2 = [] l2 = []
for i in l: for i in l:
l2.append(d.getVar(i, True)) l2.append(d.getVar(i))
return l2 return l2
ctrlfile.write("Package: %s\n" % pkgname) ctrlfile.write("Package: %s\n" % pkgname)
@ -141,9 +141,9 @@ python do_package_ipk () {
raise KeyError(f) raise KeyError(f)
# Special behavior for description... # Special behavior for description...
if 'DESCRIPTION' in fs: if 'DESCRIPTION' in fs:
summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "."
ctrlfile.write('Description: %s\n' % summary) ctrlfile.write('Description: %s\n' % summary)
description = localdata.getVar('DESCRIPTION', True) or "." description = localdata.getVar('DESCRIPTION') or "."
description = textwrap.dedent(description).strip() description = textwrap.dedent(description).strip()
if '\\n' in description: if '\\n' in description:
# Manually indent # Manually indent
@ -185,19 +185,19 @@ python do_package_ipk () {
elif (v or "").startswith("> "): elif (v or "").startswith("> "):
var[dep][i] = var[dep][i].replace("> ", ">> ") var[dep][i] = var[dep][i].replace("> ", ">> ")
rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "") rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
debian_cmp_remap(rdepends) debian_cmp_remap(rdepends)
rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "") rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "")
debian_cmp_remap(rrecommends) debian_cmp_remap(rrecommends)
rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "") rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "")
debian_cmp_remap(rsuggests) debian_cmp_remap(rsuggests)
# Deliberately drop version information here, not wanted/supported by ipk # Deliberately drop version information here, not wanted/supported by ipk
rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), []) rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), [])
rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0])) rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0]))
debian_cmp_remap(rprovides) debian_cmp_remap(rprovides)
rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "") rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "")
debian_cmp_remap(rreplaces) debian_cmp_remap(rreplaces)
rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "")
debian_cmp_remap(rconflicts) debian_cmp_remap(rconflicts)
if rdepends: if rdepends:
@ -212,14 +212,14 @@ python do_package_ipk () {
ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces)) ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
if rconflicts: if rconflicts:
ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts)) ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
src_uri = localdata.getVar("SRC_URI", True).strip() or "None" src_uri = localdata.getVar("SRC_URI").strip() or "None"
if src_uri: if src_uri:
src_uri = re.sub("\s+", " ", src_uri) src_uri = re.sub("\s+", " ", src_uri)
ctrlfile.write("Source: %s\n" % " ".join(src_uri.split())) ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
ctrlfile.close() ctrlfile.close()
for script in ["preinst", "postinst", "prerm", "postrm"]: for script in ["preinst", "postinst", "prerm", "postrm"]:
scriptvar = localdata.getVar('pkg_%s' % script, True) scriptvar = localdata.getVar('pkg_%s' % script)
if not scriptvar: if not scriptvar:
continue continue
try: try:
@ -244,15 +244,15 @@ python do_package_ipk () {
conffiles.close() conffiles.close()
os.chdir(basedir) os.chdir(basedir)
ret = subprocess.call("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH", True), ret = subprocess.call("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH"),
d.getVar("OPKGBUILDCMD", True), pkg, pkgoutdir), shell=True) d.getVar("OPKGBUILDCMD"), pkg, pkgoutdir), shell=True)
if ret != 0: if ret != 0:
bb.utils.unlockfile(lf) bb.utils.unlockfile(lf)
bb.fatal("opkg-build execution failed") bb.fatal("opkg-build execution failed")
if d.getVar('IPK_SIGN_PACKAGES', True) == '1': if d.getVar('IPK_SIGN_PACKAGES') == '1':
ipkver = "%s-%s" % (d.getVar('PKGV', True), d.getVar('PKGR', True)) ipkver = "%s-%s" % (d.getVar('PKGV'), d.getVar('PKGR'))
ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH', True)) ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH'))
sign_ipk(d, ipk_to_sign) sign_ipk(d, ipk_to_sign)
cleanupcontrol(root) cleanupcontrol(root)
@ -268,7 +268,7 @@ do_package_write_ipk[sstate-inputdirs] = "${PKGWRITEDIRIPK}"
do_package_write_ipk[sstate-outputdirs] = "${DEPLOY_DIR_IPK}" do_package_write_ipk[sstate-outputdirs] = "${DEPLOY_DIR_IPK}"
python do_package_write_ipk_setscene () { python do_package_write_ipk_setscene () {
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
if os.access(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"), os.R_OK): if os.access(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"), os.R_OK):
os.unlink(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN")) os.unlink(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"))
@ -278,7 +278,7 @@ python do_package_write_ipk_setscene () {
addtask do_package_write_ipk_setscene addtask do_package_write_ipk_setscene
python () { python () {
if d.getVar('PACKAGES', True) != '': if d.getVar('PACKAGES') != '':
deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
d.appendVarFlag('do_package_write_ipk', 'depends', deps) d.appendVarFlag('do_package_write_ipk', 'depends', deps)
d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
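Each control-file writer renders a split package against a throwaway copy of the datastore: bb.data.createCopy(), a handful of per-package overrides, then bb.data.update_data() (still required in this era of bitbake). Condensed from the loops above:

    localdata = bb.data.createCopy(d)
    localdata.setVar('ROOT', '')
    localdata.setVar('ROOT_%s' % pkg, root)
    localdata.setVar('PKG', localdata.getVar('PKG_%s' % pkg) or pkg)
    bb.data.update_data(localdata)
    # every localdata.getVar() below this point answers for this one package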
@ -13,9 +13,9 @@ MERGEPERFILEDEPS = "1"
# Construct the per-file dependencies file # Construct the per-file dependencies file
def write_rpm_perfiledata(srcname, d): def write_rpm_perfiledata(srcname, d):
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
pkgd = d.getVar('PKGD', True) pkgd = d.getVar('PKGD')
def dump_filerdeps(varname, outfile, d): def dump_filerdeps(varname, outfile, d):
outfile.write("#!/usr/bin/env python\n\n") outfile.write("#!/usr/bin/env python\n\n")
@ -23,10 +23,10 @@ def write_rpm_perfiledata(srcname, d):
outfile.write('deps = {\n') outfile.write('deps = {\n')
for pkg in packages.split(): for pkg in packages.split():
dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
dependsflist = (d.getVar(dependsflist_key, True) or "") dependsflist = (d.getVar(dependsflist_key) or "")
for dfile in dependsflist.split(): for dfile in dependsflist.split():
key = "FILE" + varname + "_" + dfile + "_" + pkg key = "FILE" + varname + "_" + dfile + "_" + pkg
depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "") depends_dict = bb.utils.explode_dep_versions(d.getVar(key) or "")
file = dfile.replace("@underscore@", "_") file = dfile.replace("@underscore@", "_")
file = file.replace("@closebrace@", "]") file = file.replace("@closebrace@", "]")
file = file.replace("@openbrace@", "[") file = file.replace("@openbrace@", "[")
@ -87,14 +87,14 @@ python write_specfile () {
# append information for logs and patches to %prep # append information for logs and patches to %prep
def add_prep(d,spec_files_bottom): def add_prep(d,spec_files_bottom):
if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) ) spec_files_bottom.append('%%prep -n %s' % d.getVar('PN') )
spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"") spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
spec_files_bottom.append('') spec_files_bottom.append('')
# append the name of the tarball to the 'SOURCE' keyword in xxx.spec. # append the name of the tarball to the 'SOURCE' keyword in xxx.spec.
def tail_source(d): def tail_source(d):
if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) ar_outdir = d.getVar('ARCHIVER_OUTDIR')
if not os.path.exists(ar_outdir): if not os.path.exists(ar_outdir):
return return
source_list = os.listdir(ar_outdir) source_list = os.listdir(ar_outdir)
@ -110,7 +110,7 @@ python write_specfile () {
# We need a simple way to remove the MLPREFIX from the package name, # We need a simple way to remove the MLPREFIX from the package name,
# and dependency information... # and dependency information...
def strip_multilib(name, d): def strip_multilib(name, d):
multilibs = d.getVar('MULTILIBS', True) or "" multilibs = d.getVar('MULTILIBS') or ""
for ext in multilibs.split(): for ext in multilibs.split():
eext = ext.split(':') eext = ext.split(':')
if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0: if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0:
@ -124,7 +124,7 @@ python write_specfile () {
newdeps[strip_multilib(dep, d)] = depends[dep] newdeps[strip_multilib(dep, d)] = depends[dep]
return bb.utils.join_deps(newdeps) return bb.utils.join_deps(newdeps)
# ml = d.getVar("MLPREFIX", True) # ml = d.getVar("MLPREFIX")
# if ml and name and len(ml) != 0 and name.find(ml) == 0: # if ml and name and len(ml) != 0 and name.find(ml) == 0:
# return ml.join(name.split(ml, 1)[1:]) # return ml.join(name.split(ml, 1)[1:])
# return name # return name
@ -144,7 +144,7 @@ python write_specfile () {
# after renaming we cannot look up the dependencies in the packagedata # after renaming we cannot look up the dependencies in the packagedata
# store. # store.
def translate_vers(varname, d): def translate_vers(varname, d):
depends = d.getVar(varname, True) depends = d.getVar(varname)
if depends: if depends:
depends_dict = bb.utils.explode_dep_versions2(depends) depends_dict = bb.utils.explode_dep_versions2(depends)
newdeps_dict = {} newdeps_dict = {}
@ -248,10 +248,10 @@ python write_specfile () {
def get_perfile(varname, pkg, d): def get_perfile(varname, pkg, d):
deps = [] deps = []
dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
dependsflist = (d.getVar(dependsflist_key, True) or "") dependsflist = (d.getVar(dependsflist_key) or "")
for dfile in dependsflist.split(): for dfile in dependsflist.split():
key = "FILE" + varname + "_" + dfile + "_" + pkg key = "FILE" + varname + "_" + dfile + "_" + pkg
depends = d.getVar(key, True) depends = d.getVar(key)
if depends: if depends:
deps.append(depends) deps.append(depends)
return " ".join(deps) return " ".join(deps)
@ -269,33 +269,33 @@ python write_specfile () {
else: else:
spec_preamble.append('%s' % textwrap.fill(dedent_text, width=75)) spec_preamble.append('%s' % textwrap.fill(dedent_text, width=75))
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
if not packages or packages == '': if not packages or packages == '':
bb.debug(1, "No packages; nothing to do") bb.debug(1, "No packages; nothing to do")
return return
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
if not pkgdest: if not pkgdest:
bb.fatal("No PKGDEST") bb.fatal("No PKGDEST")
outspecfile = d.getVar('OUTSPECFILE', True) outspecfile = d.getVar('OUTSPECFILE')
if not outspecfile: if not outspecfile:
bb.fatal("No OUTSPECFILE") bb.fatal("No OUTSPECFILE")
# Construct the SPEC file... # Construct the SPEC file...
srcname = strip_multilib(d.getVar('PN', True), d) srcname = strip_multilib(d.getVar('PN'), d)
srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".") srcsummary = (d.getVar('SUMMARY') or d.getVar('DESCRIPTION') or ".")
srcversion = d.getVar('PKGV', True).replace('-', '+') srcversion = d.getVar('PKGV').replace('-', '+')
srcrelease = d.getVar('PKGR', True) srcrelease = d.getVar('PKGR')
srcepoch = (d.getVar('PKGE', True) or "") srcepoch = (d.getVar('PKGE') or "")
srclicense = d.getVar('LICENSE', True) srclicense = d.getVar('LICENSE')
srcsection = d.getVar('SECTION', True) srcsection = d.getVar('SECTION')
srcmaintainer = d.getVar('MAINTAINER', True) srcmaintainer = d.getVar('MAINTAINER')
srchomepage = d.getVar('HOMEPAGE', True) srchomepage = d.getVar('HOMEPAGE')
srcdescription = d.getVar('DESCRIPTION', True) or "." srcdescription = d.getVar('DESCRIPTION') or "."
srccustomtagschunk = get_package_additional_metadata("rpm", d) srccustomtagschunk = get_package_additional_metadata("rpm", d)
srcdepends = strip_multilib_deps(d.getVar('DEPENDS', True), d) srcdepends = strip_multilib_deps(d.getVar('DEPENDS'), d)
srcrdepends = [] srcrdepends = []
srcrrecommends = [] srcrrecommends = []
srcrsuggests = [] srcrsuggests = []
@ -318,8 +318,8 @@ python write_specfile () {
spec_files_top = [] spec_files_top = []
spec_files_bottom = [] spec_files_bottom = []
perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0" perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0"
extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA", True) or "0") == "1" extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA") or "0") == "1"
for pkg in packages.split(): for pkg in packages.split():
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
@ -328,7 +328,7 @@ python write_specfile () {
localdata.setVar('ROOT', '') localdata.setVar('ROOT', '')
localdata.setVar('ROOT_%s' % pkg, root) localdata.setVar('ROOT_%s' % pkg, root)
pkgname = localdata.getVar('PKG_%s' % pkg, True) pkgname = localdata.getVar('PKG_%s' % pkg)
if not pkgname: if not pkgname:
pkgname = pkg pkgname = pkg
localdata.setVar('PKG', pkgname) localdata.setVar('PKG', pkgname)
@ -338,19 +338,19 @@ python write_specfile () {
bb.data.update_data(localdata) bb.data.update_data(localdata)
conffiles = get_conffiles(pkg, d) conffiles = get_conffiles(pkg, d)
dirfiles = localdata.getVar('DIRFILES', True) dirfiles = localdata.getVar('DIRFILES')
if dirfiles is not None: if dirfiles is not None:
dirfiles = dirfiles.split() dirfiles = dirfiles.split()
splitname = strip_multilib(pkgname, d) splitname = strip_multilib(pkgname, d)
splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".") splitsummary = (localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or ".")
splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+') splitversion = (localdata.getVar('PKGV') or "").replace('-', '+')
splitrelease = (localdata.getVar('PKGR', True) or "") splitrelease = (localdata.getVar('PKGR') or "")
splitepoch = (localdata.getVar('PKGE', True) or "") splitepoch = (localdata.getVar('PKGE') or "")
splitlicense = (localdata.getVar('LICENSE', True) or "") splitlicense = (localdata.getVar('LICENSE') or "")
splitsection = (localdata.getVar('SECTION', True) or "") splitsection = (localdata.getVar('SECTION') or "")
splitdescription = (localdata.getVar('DESCRIPTION', True) or ".") splitdescription = (localdata.getVar('DESCRIPTION') or ".")
splitcustomtagschunk = get_package_additional_metadata("rpm", localdata) splitcustomtagschunk = get_package_additional_metadata("rpm", localdata)
translate_vers('RDEPENDS', localdata) translate_vers('RDEPENDS', localdata)
@ -363,18 +363,18 @@ python write_specfile () {
# Map the dependencies into their final form # Map the dependencies into their final form
mapping_rename_hook(localdata) mapping_rename_hook(localdata)
splitrdepends = strip_multilib_deps(localdata.getVar('RDEPENDS', True), d) splitrdepends = strip_multilib_deps(localdata.getVar('RDEPENDS'), d)
splitrrecommends = strip_multilib_deps(localdata.getVar('RRECOMMENDS', True), d) splitrrecommends = strip_multilib_deps(localdata.getVar('RRECOMMENDS'), d)
splitrsuggests = strip_multilib_deps(localdata.getVar('RSUGGESTS', True), d) splitrsuggests = strip_multilib_deps(localdata.getVar('RSUGGESTS'), d)
splitrprovides = strip_multilib_deps(localdata.getVar('RPROVIDES', True), d) splitrprovides = strip_multilib_deps(localdata.getVar('RPROVIDES'), d)
splitrreplaces = strip_multilib_deps(localdata.getVar('RREPLACES', True), d) splitrreplaces = strip_multilib_deps(localdata.getVar('RREPLACES'), d)
splitrconflicts = strip_multilib_deps(localdata.getVar('RCONFLICTS', True), d) splitrconflicts = strip_multilib_deps(localdata.getVar('RCONFLICTS'), d)
splitrobsoletes = [] splitrobsoletes = []
splitrpreinst = localdata.getVar('pkg_preinst', True) splitrpreinst = localdata.getVar('pkg_preinst')
splitrpostinst = localdata.getVar('pkg_postinst', True) splitrpostinst = localdata.getVar('pkg_postinst')
splitrprerm = localdata.getVar('pkg_prerm', True) splitrprerm = localdata.getVar('pkg_prerm')
splitrpostrm = localdata.getVar('pkg_postrm', True) splitrpostrm = localdata.getVar('pkg_postrm')
if not perfiledeps: if not perfiledeps:
@ -621,7 +621,7 @@ python write_specfile () {
# RPMSPEC_PREAMBLE is a way to add arbitrary text to the top # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top
# of the generated spec file # of the generated spec file
external_preamble = d.getVar("RPMSPEC_PREAMBLE", True) external_preamble = d.getVar("RPMSPEC_PREAMBLE")
if external_preamble: if external_preamble:
specfile.write(external_preamble + "\n") specfile.write(external_preamble + "\n")
@ -652,20 +652,20 @@ python do_package_rpm () {
# We need a simple way to remove the MLPREFIX from the package name, # We need a simple way to remove the MLPREFIX from the package name,
# and dependency information... # and dependency information...
def strip_multilib(name, d): def strip_multilib(name, d):
ml = d.getVar("MLPREFIX", True) ml = d.getVar("MLPREFIX")
if ml and name and len(ml) != 0 and name.find(ml) >= 0: if ml and name and len(ml) != 0 and name.find(ml) >= 0:
return "".join(name.split(ml)) return "".join(name.split(ml))
return name return name
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
pkgd = d.getVar('PKGD', True) pkgd = d.getVar('PKGD')
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
if not workdir or not pkgd or not tmpdir: if not workdir or not pkgd or not tmpdir:
bb.error("Variables incorrectly set, unable to package") bb.error("Variables incorrectly set, unable to package")
return return
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
if not packages or packages == '': if not packages or packages == '':
bb.debug(1, "No packages; nothing to do") bb.debug(1, "No packages; nothing to do")
return return
@ -674,31 +674,31 @@ python do_package_rpm () {
# If the spec file already exists and has not been stored into # If the spec file already exists and has not been stored into
# pseudo's files.db, it may cause rpmbuild src.rpm to fail, # pseudo's files.db, it may cause rpmbuild src.rpm to fail,
# so remove it before doing rpmbuild src.rpm. # so remove it before doing rpmbuild src.rpm.
srcname = strip_multilib(d.getVar('PN', True), d) srcname = strip_multilib(d.getVar('PN'), d)
outspecfile = workdir + "/" + srcname + ".spec" outspecfile = workdir + "/" + srcname + ".spec"
if os.path.isfile(outspecfile): if os.path.isfile(outspecfile):
os.remove(outspecfile) os.remove(outspecfile)
d.setVar('OUTSPECFILE', outspecfile) d.setVar('OUTSPECFILE', outspecfile)
bb.build.exec_func('write_specfile', d) bb.build.exec_func('write_specfile', d)
perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0" perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0"
if perfiledeps: if perfiledeps:
outdepends, outprovides = write_rpm_perfiledata(srcname, d) outdepends, outprovides = write_rpm_perfiledata(srcname, d)
# Setup the rpmbuild arguments... # Setup the rpmbuild arguments...
rpmbuild = d.getVar('RPMBUILD', True) rpmbuild = d.getVar('RPMBUILD')
targetsys = d.getVar('TARGET_SYS', True) targetsys = d.getVar('TARGET_SYS')
targetvendor = d.getVar('HOST_VENDOR', True) targetvendor = d.getVar('HOST_VENDOR')
package_arch = (d.getVar('PACKAGE_ARCH', True) or "").replace("-", "_") package_arch = (d.getVar('PACKAGE_ARCH') or "").replace("-", "_")
sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX', True) or "nativesdk").replace("-", "_") sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX') or "nativesdk").replace("-", "_")
if package_arch not in "all any noarch".split() and not package_arch.endswith(sdkpkgsuffix): if package_arch not in "all any noarch".split() and not package_arch.endswith(sdkpkgsuffix):
ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_") ml_prefix = (d.getVar('MLPREFIX') or "").replace("-", "_")
d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch) d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
else: else:
d.setVar('PACKAGE_ARCH_EXTEND', package_arch) d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}') pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
d.setVar('RPM_PKGWRITEDIR', pkgwritedir) d.setVar('RPM_PKGWRITEDIR', pkgwritedir)
bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR', True)) bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR'))
pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${HOST_VENDOR}-${HOST_OS}') pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${HOST_VENDOR}-${HOST_OS}')
magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc') magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc')
bb.utils.mkdirhier(pkgwritedir) bb.utils.mkdirhier(pkgwritedir)
@ -707,7 +707,7 @@ python do_package_rpm () {
cmd = rpmbuild cmd = rpmbuild
cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd
cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'" cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'"
cmd = cmd + " --define '_builddir " + d.getVar('S', True) + "'" cmd = cmd + " --define '_builddir " + d.getVar('S') + "'"
cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'" cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'"
cmd = cmd + " --define '_use_internal_dependency_generator 0'" cmd = cmd + " --define '_use_internal_dependency_generator 0'"
if perfiledeps: if perfiledeps:
@ -721,8 +721,8 @@ python do_package_rpm () {
cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'" cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'"
cmd = cmd + " --define '_tmppath " + workdir + "'" cmd = cmd + " --define '_tmppath " + workdir + "'"
if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR', True) + "'" cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'"
cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR', True) + "'" cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR') + "'"
cmdsrpm = cmdsrpm + " -bs " + outspecfile cmdsrpm = cmdsrpm + " -bs " + outspecfile
# Build the .src.rpm # Build the .src.rpm
d.setVar('SBUILDSPEC', cmdsrpm + "\n") d.setVar('SBUILDSPEC', cmdsrpm + "\n")
@ -735,12 +735,12 @@ python do_package_rpm () {
d.setVarFlag('BUILDSPEC', 'func', '1') d.setVarFlag('BUILDSPEC', 'func', '1')
bb.build.exec_func('BUILDSPEC', d) bb.build.exec_func('BUILDSPEC', d)
if d.getVar('RPM_SIGN_PACKAGES', True) == '1': if d.getVar('RPM_SIGN_PACKAGES') == '1':
bb.build.exec_func("sign_rpm", d) bb.build.exec_func("sign_rpm", d)
} }
python () { python () {
if d.getVar('PACKAGES', True) != '': if d.getVar('PACKAGES') != '':
deps = ' rpm-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' deps = ' rpm-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot'
d.appendVarFlag('do_package_write_rpm', 'depends', deps) d.appendVarFlag('do_package_write_rpm', 'depends', deps)
d.setVarFlag('do_package_write_rpm', 'fakeroot', '1') d.setVarFlag('do_package_write_rpm', 'fakeroot', '1')
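Note the double negative around MERGEPERFILEDEPS in write_specfile() and do_package_rpm(): per-file dependencies are the default, and setting the variable to "1" turns them off. In short:

    # MERGEPERFILEDEPS unset or "0" -> perfiledeps True:
    #     write_rpm_perfiledata() emits per-file dependency helpers for rpmbuild
    # MERGEPERFILEDEPS = "1"        -> perfiledeps False:
    #     dependencies are merged at the package level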
@ -7,27 +7,27 @@ python do_package_tar () {
oldcwd = os.getcwd() oldcwd = os.getcwd()
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to package") bb.error("WORKDIR not defined, unable to package")
return return
outdir = d.getVar('DEPLOY_DIR_TAR', True) outdir = d.getVar('DEPLOY_DIR_TAR')
if not outdir: if not outdir:
bb.error("DEPLOY_DIR_TAR not defined, unable to package") bb.error("DEPLOY_DIR_TAR not defined, unable to package")
return return
dvar = d.getVar('D', True) dvar = d.getVar('D')
if not dvar: if not dvar:
bb.error("D not defined, unable to package") bb.error("D not defined, unable to package")
return return
packages = d.getVar('PACKAGES', True) packages = d.getVar('PACKAGES')
if not packages: if not packages:
bb.debug(1, "PACKAGES not defined, nothing to package") bb.debug(1, "PACKAGES not defined, nothing to package")
return return
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST')
bb.utils.mkdirhier(outdir) bb.utils.mkdirhier(outdir)
bb.utils.mkdirhier(dvar) bb.utils.mkdirhier(dvar)
@ -46,7 +46,7 @@ python do_package_tar () {
os.chdir(root) os.chdir(root)
dlist = os.listdir(root) dlist = os.listdir(root)
if not dlist: if not dlist:
bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR')))
continue continue
args = "tar -cz --exclude=CONTROL --exclude=DEBIAN -f".split() args = "tar -cz --exclude=CONTROL --exclude=DEBIAN -f".split()
ret = subprocess.call(args + [tarfn] + dlist) ret = subprocess.call(args + [tarfn] + dlist)
@ -57,7 +57,7 @@ python do_package_tar () {
} }
python () { python () {
if d.getVar('PACKAGES', True) != '': if d.getVar('PACKAGES') != '':
deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split() deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split()
deps.append('tar-native:do_populate_sysroot') deps.append('tar-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
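One detail worth noticing in the tar writer: the getVarFlag() call keeps its explicit third argument, because this sweep only touched getVar(). The flag lookup is byte-for-byte identical on both sides of the diff:

    # unchanged by this commit:
    deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split()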
@ -2,10 +2,10 @@ python read_subpackage_metadata () {
import oe.packagedata import oe.packagedata
vars = { vars = {
"PN" : d.getVar('PN', True), "PN" : d.getVar('PN'),
"PE" : d.getVar('PE', True), "PE" : d.getVar('PE'),
"PV" : d.getVar('PV', True), "PV" : d.getVar('PV'),
"PR" : d.getVar('PR', True), "PR" : d.getVar('PR'),
} }
data = oe.packagedata.read_pkgdata(vars["PN"], d) data = oe.packagedata.read_pkgdata(vars["PN"], d)
@ -13,7 +13,7 @@ python read_subpackage_metadata () {
for key in data.keys(): for key in data.keys():
d.setVar(key, data[key]) d.setVar(key, data[key])
for pkg in d.getVar('PACKAGES', True).split(): for pkg in d.getVar('PACKAGES').split():
sdata = oe.packagedata.read_subpkgdata(pkg, d) sdata = oe.packagedata.read_subpkgdata(pkg, d)
for key in sdata.keys(): for key in sdata.keys():
if key in vars: if key in vars:
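read_subpackage_metadata above pulls everything back out of the pkgdata store. A pkgdata file is a flat "KEY: value" text file, so the oe.packagedata helpers hand back plain dicts; an illustrative (made-up) example:

    # ${PKGDATA_DIR}/foo might contain:
    #   PACKAGES: foo foo-dev foo-dbg
    #   PV: 1.0
    # oe.packagedata.read_pkgdata('foo', d) returns roughly
    #   {'PACKAGES': 'foo foo-dev foo-dbg', 'PV': '1.0'}
    # and the loop above writes each key back into the datastore with d.setVar().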
@ -31,7 +31,7 @@ python() {
# This assumes that the package_write task is called package_write_<pkgtype> # This assumes that the package_write task is called package_write_<pkgtype>
# and that the directory in which packages should be written is # and that the directory in which packages should be written is
# pointed to by the variable DEPLOY_DIR_<PKGTYPE> # pointed to by the variable DEPLOY_DIR_<PKGTYPE>
for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split(): for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split():
if pkgclass.startswith('package_'): if pkgclass.startswith('package_'):
pkgtype = pkgclass.split('_', 1)[1] pkgtype = pkgclass.split('_', 1)[1]
pkgwritefunc = 'do_package_write_%s' % pkgtype pkgwritefunc = 'do_package_write_%s' % pkgtype
@ -71,7 +71,7 @@ python() {
# This isn't the real task function - it's a template that we use in the # This isn't the real task function - it's a template that we use in the
# anonymous python code above # anonymous python code above
fakeroot python do_package_compare () { fakeroot python do_package_compare () {
currenttask = d.getVar('BB_CURRENTTASK', True) currenttask = d.getVar('BB_CURRENTTASK')
pkgtype = currenttask.rsplit('_', 1)[1] pkgtype = currenttask.rsplit('_', 1)[1]
package_compare_impl(pkgtype, d) package_compare_impl(pkgtype, d)
} }
@ -83,12 +83,12 @@ def package_compare_impl(pkgtype, d):
import subprocess import subprocess
import oe.sstatesig import oe.sstatesig
pn = d.getVar('PN', True) pn = d.getVar('PN')
deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True) deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
prepath = deploydir + '-prediff/' prepath = deploydir + '-prediff/'
# Find out what the PKGR values are # Find out what the PKGR values are
pkgdatadir = d.getVar('PKGDATA_DIR', True) pkgdatadir = d.getVar('PKGDATA_DIR')
packages = [] packages = []
try: try:
with open(os.path.join(pkgdatadir, pn), 'r') as f: with open(os.path.join(pkgdatadir, pn), 'r') as f:
@ -138,7 +138,7 @@ def package_compare_impl(pkgtype, d):
files = [] files = []
docopy = False docopy = False
manifest, _ = oe.sstatesig.sstate_get_manifest_filename(pkgwritetask, d) manifest, _ = oe.sstatesig.sstate_get_manifest_filename(pkgwritetask, d)
mlprefix = d.getVar('MLPREFIX', True) mlprefix = d.getVar('MLPREFIX')
# Copy all of the recipe's packages if any one of them differs, so that # Copy all of the recipe's packages if any one of them differs, so that
# they have the same PR. # they have the same PR.
with open(manifest, 'r') as f: with open(manifest, 'r') as f:
@ -215,7 +215,7 @@ def package_compare_impl(pkgtype, d):
# multilib), they're identical in theory, but sstate.bbclass # multilib), they're identical in theory, but sstate.bbclass
# copies it again, so keep align with that. # copies it again, so keep align with that.
if os.path.exists(destpath) and pkgtype == 'rpm' \ if os.path.exists(destpath) and pkgtype == 'rpm' \
and d.getVar('PACKAGE_ARCH', True) == 'all': and d.getVar('PACKAGE_ARCH') == 'all':
os.unlink(destpath) os.unlink(destpath)
if (os.stat(srcpath).st_dev == os.stat(destdir).st_dev): if (os.stat(srcpath).st_dev == os.stat(destdir).st_dev):
# Use a hard link to save space # Use a hard link to save space
@ -229,7 +229,7 @@ def package_compare_impl(pkgtype, d):
do_cleansstate[postfuncs] += "pfs_cleanpkgs" do_cleansstate[postfuncs] += "pfs_cleanpkgs"
python pfs_cleanpkgs () { python pfs_cleanpkgs () {
import errno import errno
for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split(): for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split():
if pkgclass.startswith('package_'): if pkgclass.startswith('package_'):
pkgtype = pkgclass.split('_', 1)[1] pkgtype = pkgclass.split('_', 1)[1]
deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True) deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
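
As a minimal sketch of the naming convention described in the comment above ("package_write_<pkgtype>" / "DEPLOY_DIR_<PKGTYPE>"), with a literal string in place of the PACKAGE_CLASSES datastore read (values are illustrative):

package_classes = 'package_rpm package_ipk'
for pkgclass in (package_classes or '').split():
    if pkgclass.startswith('package_'):
        pkgtype = pkgclass.split('_', 1)[1]
        print('do_package_write_%s -> ${DEPLOY_DIR_%s}' % (pkgtype, pkgtype.upper()))
# prints:
# do_package_write_rpm -> ${DEPLOY_DIR_RPM}
# do_package_write_ipk -> ${DEPLOY_DIR_IPK}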


@@ -16,15 +16,15 @@ PACKAGE_ARCH_EXPANDED := "${PACKAGE_ARCH}"
 
 LICENSE ?= "MIT"
 
-inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED', True) == 'all', 'allarch', '')}
+inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED') == 'all', 'allarch', '')}
 
 # This automatically adds -dbg and -dev flavours of all PACKAGES
 # to the list. Their dependencies (RRECOMMENDS) are handled as usual
 # by package_depchains in a following step.
 # Also mark all packages as ALLOW_EMPTY
 python () {
-    packages = d.getVar('PACKAGES', True).split()
-    if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY', True) != '1':
+    packages = d.getVar('PACKAGES').split()
+    if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY') != '1':
         types = ['', '-dbg', '-dev']
         if bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d):
             types.append('-ptest')
@@ -49,7 +49,7 @@ do_install[noexec] = "1"
 do_populate_sysroot[noexec] = "1"
 
 python () {
-    initman = d.getVar("VIRTUAL-RUNTIME_init_manager", True)
+    initman = d.getVar("VIRTUAL-RUNTIME_init_manager")
     if initman and initman in ['sysvinit', 'systemd'] and not bb.utils.contains('DISTRO_FEATURES', initman, True, False, d):
         bb.fatal("Please ensure that your setting of VIRTUAL-RUNTIME_init_manager (%s) matches the entries enabled in DISTRO_FEATURES" % initman)
 }
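
A small sketch of the flavour expansion described in the comments above, with plain strings in place of the PACKAGES/DISTRO_FEATURES reads (names are illustrative, and the real class does more, e.g. marking packages ALLOW_EMPTY):

packages = 'packagegroup-core-tools'.split()
types = ['', '-dbg', '-dev']
ptest_in_distro_features = True  # stand-in for the bb.utils.contains() check
if ptest_in_distro_features:
    types.append('-ptest')
print([pkg + t for pkg in packages for t in types])
# -> ['packagegroup-core-tools', 'packagegroup-core-tools-dbg',
#     'packagegroup-core-tools-dev', 'packagegroup-core-tools-ptest']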


@@ -11,7 +11,7 @@ PATCH_GIT_USER_EMAIL ?= "oe.patch@oe"
 inherit terminal
 
 python () {
-    if d.getVar('PATCHTOOL', True) == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS', True) == '1':
+    if d.getVar('PATCHTOOL') == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS') == '1':
         tasks = list(filter(lambda k: d.getVarFlag(k, "task", True), d.keys()))
         extratasks = []
         def follow_chain(task, endtask, chain=None):
@@ -44,8 +44,8 @@ python () {
 
 python patch_task_patch_prefunc() {
     # Prefunc for do_patch
-    func = d.getVar('BB_RUNTASK', True)
-    srcsubdir = d.getVar('S', True)
+    func = d.getVar('BB_RUNTASK')
+    srcsubdir = d.getVar('S')
 
     patchdir = os.path.join(srcsubdir, 'patches')
     if os.path.exists(patchdir):
@@ -59,12 +59,12 @@ python patch_task_postfunc() {
     # Prefunc for task functions between do_unpack and do_patch
     import oe.patch
     import shutil
-    func = d.getVar('BB_RUNTASK', True)
-    srcsubdir = d.getVar('S', True)
+    func = d.getVar('BB_RUNTASK')
+    srcsubdir = d.getVar('S')
 
     if os.path.exists(srcsubdir):
         if func == 'do_patch':
-            haspatches = (d.getVar('PATCH_HAS_PATCHES_DIR', True) == '1')
+            haspatches = (d.getVar('PATCH_HAS_PATCHES_DIR') == '1')
             patchdir = os.path.join(srcsubdir, 'patches')
             if os.path.exists(patchdir):
                 shutil.rmtree(patchdir)
@@ -99,20 +99,20 @@ python patch_do_patch() {
         "git": oe.patch.GitApplyTree,
     }
 
-    cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt']
+    cls = patchsetmap[d.getVar('PATCHTOOL') or 'quilt']
 
     resolvermap = {
         "noop": oe.patch.NOOPResolver,
         "user": oe.patch.UserResolver,
     }
 
-    rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user']
+    rcls = resolvermap[d.getVar('PATCHRESOLVE') or 'user']
 
     classes = {}
 
-    s = d.getVar('S', True)
+    s = d.getVar('S')
 
-    os.putenv('PATH', d.getVar('PATH', True))
+    os.putenv('PATH', d.getVar('PATH'))
 
     # We must use one TMPDIR per process so that the "patch" processes
     # don't generate the same temp file name.
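
The PATCHTOOL/PATCHRESOLVE lookups above follow a dictionary-with-fallback pattern; a minimal sketch with placeholder classes standing in for the oe.patch implementations:

class QuiltTree: pass
class GitApplyTree: pass

patchsetmap = {'quilt': QuiltTree, 'git': GitApplyTree}
patchtool = None  # stand-in for d.getVar('PATCHTOOL'); unset here
cls = patchsetmap[patchtool or 'quilt']  # unset/empty falls back to quilt
print(cls.__name__)  # -> QuiltTree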


@@ -28,20 +28,20 @@ fi
 }
 
 python populate_packages_append() {
-    pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES', True).split()
+    pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES').split()
 
     for pkg in pixbuf_pkgs:
         bb.note("adding pixbuf postinst and postrm scripts to %s" % pkg)
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst')
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += d.getVar('pixbufcache_common', True)
+        postinst += d.getVar('pixbufcache_common')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
+        postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm')
         if not postrm:
             postrm = '#!/bin/sh\n'
-        postrm += d.getVar('pixbufcache_common', True)
+        postrm += d.getVar('pixbufcache_common')
         d.setVar('pkg_postrm_%s' % pkg, postrm)
 }
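
A sketch of the scriptlet assembly above: reuse any per-package postinst, otherwise start a fresh shell script, then append the shared snippet. Plain strings stand in for the datastore reads, and the snippet body is illustrative, not the real pixbufcache_common:

existing_postinst = None  # stand-in for d.getVar('pkg_postinst_%s' % pkg)
pixbufcache_common = 'gdk-pixbuf-query-loaders --update-cache\n'  # illustrative
postinst = existing_postinst
if not postinst:
    postinst = '#!/bin/sh\n'
postinst += pixbufcache_common
print(postinst)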


@@ -11,7 +11,7 @@ COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest'
 def complementary_globs(featurevar, d):
     all_globs = d.getVarFlags('COMPLEMENTARY_GLOB')
     globs = []
-    features = set((d.getVar(featurevar, True) or '').split())
+    features = set((d.getVar(featurevar) or '').split())
     for name, glob in all_globs.items():
         if name in features:
             globs.append(glob)
@@ -57,30 +57,30 @@ SDK_PRE_INSTALL_COMMAND ?= ""
 SDK_POST_INSTALL_COMMAND ?= ""
 SDK_RELOCATE_AFTER_INSTALL ?= "1"
 
-SDKEXTPATH ?= "~/${@d.getVar('DISTRO', True)}_sdk"
-SDK_TITLE ?= "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} SDK"
+SDKEXTPATH ?= "~/${@d.getVar('DISTRO')}_sdk"
+SDK_TITLE ?= "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} SDK"
 
 SDK_TARGET_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.target.manifest"
 SDK_HOST_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.host.manifest"
 
 python write_target_sdk_manifest () {
     from oe.sdk import sdk_list_installed_packages
     from oe.utils import format_pkg_list
-    sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST", True))
+    sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST"))
     pkgs = sdk_list_installed_packages(d, True)
     if not os.path.exists(sdkmanifestdir):
         bb.utils.mkdirhier(sdkmanifestdir)
-    with open(d.getVar('SDK_TARGET_MANIFEST', True), 'w') as output:
+    with open(d.getVar('SDK_TARGET_MANIFEST'), 'w') as output:
         output.write(format_pkg_list(pkgs, 'ver'))
 }
 
 python write_host_sdk_manifest () {
     from oe.sdk import sdk_list_installed_packages
     from oe.utils import format_pkg_list
-    sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST", True))
+    sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST"))
     pkgs = sdk_list_installed_packages(d, False)
     if not os.path.exists(sdkmanifestdir):
         bb.utils.mkdirhier(sdkmanifestdir)
-    with open(d.getVar('SDK_HOST_MANIFEST', True), 'w') as output:
+    with open(d.getVar('SDK_HOST_MANIFEST'), 'w') as output:
         output.write(format_pkg_list(pkgs, 'ver'))
 }
@@ -93,7 +93,7 @@ def populate_sdk_common(d):
     from oe.sdk import populate_sdk
     from oe.manifest import create_manifest, Manifest
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     runtime_mapping_rename("TOOLCHAIN_TARGET_TASK", pn, d)
     runtime_mapping_rename("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY", pn, d)
@@ -101,13 +101,13 @@ def populate_sdk_common(d):
     ld.setVar("PKGDATA_DIR", "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}/pkgdata")
     runtime_mapping_rename("TOOLCHAIN_HOST_TASK", pn, ld)
     runtime_mapping_rename("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", pn, ld)
-    d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK", True))
-    d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", True))
+    d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK"))
+    d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY"))
 
     # create target/host SDK manifests
-    create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True),
+    create_manifest(d, manifest_dir=d.getVar('SDK_DIR'),
                     manifest_type=Manifest.MANIFEST_TYPE_SDK_HOST)
-    create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True),
+    create_manifest(d, manifest_dir=d.getVar('SDK_DIR'),
                     manifest_type=Manifest.MANIFEST_TYPE_SDK_TARGET)
 
     populate_sdk(d)
@@ -134,7 +134,7 @@ fakeroot create_sdk_files() {
 
 python check_sdk_sysroots() {
     # Fails build if there are broken or dangling symlinks in SDK sysroots
-    if d.getVar('CHECK_SDK_SYSROOTS', True) != '1':
+    if d.getVar('CHECK_SDK_SYSROOTS') != '1':
         # disabled, bail out
         return
@@ -142,8 +142,8 @@ python check_sdk_sysroots() {
         return os.path.abspath(path)
 
     # Get scan root
-    SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT', True),
-                                               d.getVar('SDKPATH', True)))
+    SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT'),
+                                               d.getVar('SDKPATH')))
 
     bb.note('Checking SDK sysroots at ' + SCAN_ROOT)
@@ -218,7 +218,7 @@ EOF
        -e 's#@SDKEXTPATH@#${SDKEXTPATH}#g' \
        -e 's#@OLDEST_KERNEL@#${SDK_OLDEST_KERNEL}#g' \
        -e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \
-       -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE", True).replace('&', '\&')}#g' \
+       -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE").replace('&', '\&')}#g' \
        -e 's#@SDK_VERSION@#${SDK_VERSION}#g' \
        -e '/@SDK_PRE_INSTALL_COMMAND@/d' \
        -e '/@SDK_POST_INSTALL_COMMAND@/d' \
@@ -268,7 +268,7 @@ do_populate_sdk[file-checksums] += "${COREBASE}/meta/files/toolchain-shar-reloca
                                    ${COREBASE}/meta/files/toolchain-shar-extract.sh:True"
 
 do_populate_sdk[dirs] = "${PKGDATA_DIR} ${TOPDIR}"
-do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS', True).split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}"
-do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS', True).split()])}"
+do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS').split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}"
+do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS').split()])}"
 do_populate_sdk[recrdeptask] += "do_packagedata do_package_write_rpm do_package_write_ipk do_package_write_deb"
 addtask populate_sdk
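
complementary_globs() above is a straightforward feature-to-glob filter; a runnable sketch with a plain dict in place of getVarFlags(). Only the ptest entry is visible in the hunk header above; the other two flag values are illustrative:

all_globs = {'dev-pkgs': '*-dev', 'dbg-pkgs': '*-dbg', 'ptest-pkgs': '*-ptest'}
features = set('dev-pkgs dbg-pkgs'.split())  # stand-in for e.g. an image-features variable
globs = [glob for name, glob in all_globs.items() if name in features]
print(' '.join(sorted(globs)))  # -> *-dbg *-dev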


@@ -21,7 +21,7 @@ SDK_EXT_task-populate-sdk-ext = "-ext"
 # Options are full or minimal
 SDK_EXT_TYPE ?= "full"
 SDK_INCLUDE_PKGDATA ?= "0"
-SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE', True) == 'full' else '0'}"
+SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE') == 'full' else '0'}"
 
 SDK_RECRDEP_TASKS ?= ""
@@ -43,8 +43,8 @@ SDK_TARGETS ?= "${PN}"
 
 def get_sdk_install_targets(d, images_only=False):
     sdk_install_targets = ''
-    if images_only or d.getVar('SDK_EXT_TYPE', True) != 'minimal':
-        sdk_install_targets = d.getVar('SDK_TARGETS', True)
+    if images_only or d.getVar('SDK_EXT_TYPE') != 'minimal':
+        sdk_install_targets = d.getVar('SDK_TARGETS')
 
     depd = d.getVar('BB_TASKDEPDATA', False)
     for v in depd.values():
@@ -53,9 +53,9 @@ def get_sdk_install_targets(d, images_only=False):
                 sdk_install_targets += ' {}'.format(v[0])
 
     if not images_only:
-        if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1':
+        if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
             sdk_install_targets += ' meta-world-pkgdata:do_allpackagedata'
-        if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1':
+        if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1':
             sdk_install_targets += ' meta-extsdk-toolchain:do_populate_sysroot'
 
     return sdk_install_targets
@@ -83,7 +83,7 @@ TOOLCHAIN_OUTPUTNAME_task-populate-sdk-ext = "${TOOLCHAINEXT_OUTPUTNAME}"
 SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest"
 SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest"
 
-SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} Extensible SDK"
+SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} Extensible SDK"
 
 def clean_esdk_builddir(d, sdkbasepath):
     """Clean up traces of the fake build for create_filtered_tasklist()"""
@@ -110,7 +110,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
     try:
         with open(sdkbasepath + '/conf/local.conf', 'a') as f:
             # Force the use of sstate from the build system
-            f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR', True))
+            f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR'))
             f.write('SSTATE_MIRRORS_forcevariable = ""\n')
             # Ensure TMPDIR is the default so that clean_esdk_builddir() can delete it
             f.write('TMPDIR_forcevariable = "${TOPDIR}/tmp"\n')
@@ -121,7 +121,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
 
         # Unfortunately the default SDKPATH (or even a custom value) may contain characters that bitbake
        # will not allow in its COREBASE path, so we need to rename the directory temporarily
-        temp_sdkbasepath = d.getVar('SDK_OUTPUT', True) + '/tmp-renamed-sdk'
+        temp_sdkbasepath = d.getVar('SDK_OUTPUT') + '/tmp-renamed-sdk'
         # Delete any existing temp dir
         try:
             shutil.rmtree(temp_sdkbasepath)
@@ -130,7 +130,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath):
         os.rename(sdkbasepath, temp_sdkbasepath)
         try:
             cmdprefix = '. %s .; ' % conf_initpath
-            logfile = d.getVar('WORKDIR', True) + '/tasklist_bb_log.txt'
+            logfile = d.getVar('WORKDIR') + '/tasklist_bb_log.txt'
             try:
                 oe.copy_buildsystem.check_sstate_task_list(d, get_sdk_install_targets(d), tasklistfile, cmdprefix=cmdprefix, cwd=temp_sdkbasepath, logfile=logfile)
             except bb.process.ExecutionError as e:
@@ -152,7 +152,7 @@ python copy_buildsystem () {
     import glob
     import oe.copy_buildsystem
 
-    oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT', True)
+    oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT')
 
     conf_bbpath = ''
     conf_initpath = ''
@@ -160,10 +160,10 @@ python copy_buildsystem () {
 
     # Copy in all metadata layers + bitbake (as repositories)
     buildsystem = oe.copy_buildsystem.BuildSystem('extensible SDK', d)
-    baseoutpath = d.getVar('SDK_OUTPUT', True) + '/' + d.getVar('SDKPATH', True)
+    baseoutpath = d.getVar('SDK_OUTPUT') + '/' + d.getVar('SDKPATH')
 
     # Determine if we're building a derivative extensible SDK (from devtool build-sdk)
-    derivative = (d.getVar('SDK_DERIVATIVE', True) or '') == '1'
+    derivative = (d.getVar('SDK_DERIVATIVE') or '') == '1'
     if derivative:
         workspace_name = 'orig-workspace'
     else:
@@ -171,7 +171,7 @@ python copy_buildsystem () {
     layers_copied = buildsystem.copy_bitbake_and_layers(baseoutpath + '/layers', workspace_name)
 
     sdkbblayers = []
-    corebase = os.path.basename(d.getVar('COREBASE', True))
+    corebase = os.path.basename(d.getVar('COREBASE'))
     for layer in layers_copied:
         if corebase == os.path.basename(layer):
             conf_bbpath = os.path.join('layers', layer, 'bitbake')
@@ -202,8 +202,8 @@ python copy_buildsystem () {
         config.set('General', 'init_path', conf_initpath)
         config.set('General', 'core_meta_subdir', core_meta_subdir)
         config.add_section('SDK')
-        config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS', True))
-        updateurl = d.getVar('SDK_UPDATE_URL', True)
+        config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS'))
+        updateurl = d.getVar('SDK_UPDATE_URL')
         if updateurl:
             config.set('SDK', 'updateserver', updateurl)
         bb.utils.mkdirhier(os.path.join(baseoutpath, 'conf'))
@@ -215,7 +215,7 @@ python copy_buildsystem () {
         pass
 
     # Create a layer for new recipes / appends
-    bbpath = d.getVar('BBPATH', True)
+    bbpath = d.getVar('BBPATH')
     bb.process.run(['devtool', '--bbpath', bbpath, '--basepath', baseoutpath, 'create-workspace', '--create-only', os.path.join(baseoutpath, 'workspace')])
 
     # Create bblayers.conf
@@ -248,16 +248,16 @@ python copy_buildsystem () {
             bb.utils.mkdirhier(uninative_outdir)
             shutil.copy(uninative_file, uninative_outdir)
 
-    env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE', True) or '').split()
+    env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE') or '').split()
     env_whitelist_values = {}
 
     # Create local.conf
-    builddir = d.getVar('TOPDIR', True)
+    builddir = d.getVar('TOPDIR')
     if derivative:
         shutil.copyfile(builddir + '/conf/local.conf', baseoutpath + '/conf/local.conf')
     else:
-        local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST', True) or '').split()
-        local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST', True) or '').split()
+        local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST') or '').split()
+        local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST') or '').split()
         def handle_var(varname, origvalue, op, newlines):
             if varname in local_conf_blacklist or (origvalue.strip().startswith('/') and not varname in local_conf_whitelist):
                 newlines.append('# Removed original setting of %s\n' % varname)
@@ -285,7 +285,7 @@ python copy_buildsystem () {
             f.write('DL_DIR = "${TOPDIR}/downloads"\n')
             f.write('INHERIT += "%s"\n' % 'uninative')
-            f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH', True), uninative_checksum))
+            f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH'), uninative_checksum))
             f.write('CONF_VERSION = "%s"\n\n' % d.getVar('CONF_VERSION', False))
 
     # Some classes are not suitable for SDK, remove them from INHERIT
@@ -319,7 +319,7 @@ python copy_buildsystem () {
 
         # If you define a sdk_extraconf() function then it can contain additional config
         # (Though this is awkward; sdk-extra.conf should probably be used instead)
-        extraconf = (d.getVar('sdk_extraconf', True) or '').strip()
+        extraconf = (d.getVar('sdk_extraconf') or '').strip()
         if extraconf:
             # Strip off any leading / trailing spaces
             for line in extraconf.splitlines():
@@ -352,7 +352,7 @@ python copy_buildsystem () {
     # BB_ENV_EXTRAWHITE) are set in the SDK's configuration
     extralines = []
     for name, value in env_whitelist_values.items():
-        actualvalue = d.getVar(name, True) or ''
+        actualvalue = d.getVar(name) or ''
         if value != actualvalue:
             extralines.append('%s = "%s"\n' % (name, actualvalue))
     if extralines:
@@ -365,7 +365,7 @@ python copy_buildsystem () {
 
     # Filter the locked signatures file to just the sstate tasks we are interested in
     excluded_targets = get_sdk_install_targets(d, images_only=True)
-    sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc'
+    sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc'
     lockedsigs_pruned = baseoutpath + '/conf/locked-sigs.inc'
     oe.copy_buildsystem.prune_lockedsigs([],
                                          excluded_targets.split(),
@@ -378,36 +378,36 @@ python copy_buildsystem () {
     # uninative.bbclass sets NATIVELSBSTRING to 'universal%s' % oe.utils.host_gcc_version(d)
     fixedlsbstring = "universal%s" % oe.utils.host_gcc_version(d)
 
-    sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1')
-    sdk_ext_type = d.getVar('SDK_EXT_TYPE', True)
+    sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1')
+    sdk_ext_type = d.getVar('SDK_EXT_TYPE')
     if sdk_ext_type != 'minimal' or sdk_include_toolchain or derivative:
         # Create the filtered task list used to generate the sstate cache shipped with the SDK
-        tasklistfn = d.getVar('WORKDIR', True) + '/tasklist.txt'
+        tasklistfn = d.getVar('WORKDIR') + '/tasklist.txt'
         create_filtered_tasklist(d, baseoutpath, tasklistfn, conf_initpath)
     else:
         tasklistfn = None
 
     # Add packagedata if enabled
-    if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1':
-        lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base.inc'
-        lockedsigs_copy = d.getVar('WORKDIR', True) + '/locked-sigs-copy.inc'
+    if d.getVar('SDK_INCLUDE_PKGDATA') == '1':
+        lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc'
+        lockedsigs_copy = d.getVar('WORKDIR') + '/locked-sigs-copy.inc'
         shutil.move(lockedsigs_pruned, lockedsigs_base)
         oe.copy_buildsystem.merge_lockedsigs(['do_packagedata'],
                                              lockedsigs_base,
-                                             d.getVar('STAGING_DIR_HOST', True) + '/world-pkgdata/locked-sigs-pkgdata.inc',
+                                             d.getVar('STAGING_DIR_HOST') + '/world-pkgdata/locked-sigs-pkgdata.inc',
                                              lockedsigs_pruned,
                                              lockedsigs_copy)
 
     if sdk_include_toolchain:
-        lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base2.inc'
-        lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST', True) + '/locked-sigs/locked-sigs-extsdk-toolchain.inc'
+        lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base2.inc'
+        lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST') + '/locked-sigs/locked-sigs-extsdk-toolchain.inc'
         shutil.move(lockedsigs_pruned, lockedsigs_base)
         oe.copy_buildsystem.merge_lockedsigs([],
                                              lockedsigs_base,
                                              lockedsigs_toolchain,
                                              lockedsigs_pruned)
         oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_toolchain,
-                                                       d.getVar('SSTATE_DIR', True),
+                                                       d.getVar('SSTATE_DIR'),
                                                        sstate_out, d,
                                                        fixedlsbstring,
                                                        filterfile=tasklistfn)
@@ -417,22 +417,22 @@ python copy_buildsystem () {
             # Assume the user is not going to set up an additional sstate
             # mirror, thus we need to copy the additional artifacts (from
             # workspace recipes) into the derivative SDK
-            lockedsigs_orig = d.getVar('TOPDIR', True) + '/conf/locked-sigs.inc'
+            lockedsigs_orig = d.getVar('TOPDIR') + '/conf/locked-sigs.inc'
             if os.path.exists(lockedsigs_orig):
-                lockedsigs_extra = d.getVar('WORKDIR', True) + '/locked-sigs-extra.inc'
+                lockedsigs_extra = d.getVar('WORKDIR') + '/locked-sigs-extra.inc'
                 oe.copy_buildsystem.merge_lockedsigs(None,
                                                      lockedsigs_orig,
                                                      lockedsigs_pruned,
                                                      None,
                                                      lockedsigs_extra)
                 oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_extra,
-                                                               d.getVar('SSTATE_DIR', True),
+                                                               d.getVar('SSTATE_DIR'),
                                                                sstate_out, d,
                                                                fixedlsbstring,
                                                                filterfile=tasklistfn)
         else:
             oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_pruned,
-                                                           d.getVar('SSTATE_DIR', True),
+                                                           d.getVar('SSTATE_DIR'),
                                                            sstate_out, d,
                                                            fixedlsbstring,
                                                            filterfile=tasklistfn)
@@ -463,24 +463,24 @@ python copy_buildsystem () {
 def get_current_buildtools(d):
     """Get the file name of the current buildtools installer"""
     import glob
-    btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY', True), '*-buildtools-nativesdk-standalone-*.sh'))
+    btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY'), '*-buildtools-nativesdk-standalone-*.sh'))
     btfiles.sort(key=os.path.getctime)
     return os.path.basename(btfiles[-1])
 
 def get_sdk_required_utilities(buildtools_fn, d):
     """Find required utilities that aren't provided by the buildtools"""
-    sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES', True) or '').split()
+    sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES') or '').split()
     sanity_required_utilities.append(d.expand('${BUILD_PREFIX}gcc'))
     sanity_required_utilities.append(d.expand('${BUILD_PREFIX}g++'))
-    buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY', True), buildtools_fn)
+    buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY'), buildtools_fn)
     filelist, _ = bb.process.run('%s -l' % buildtools_installer)
     localdata = bb.data.createCopy(d)
     localdata.setVar('SDKPATH', '.')
-    sdkpathnative = localdata.getVar('SDKPATHNATIVE', True)
-    sdkbindirs = [localdata.getVar('bindir_nativesdk', True),
-                  localdata.getVar('sbindir_nativesdk', True),
-                  localdata.getVar('base_bindir_nativesdk', True),
-                  localdata.getVar('base_sbindir_nativesdk', True)]
+    sdkpathnative = localdata.getVar('SDKPATHNATIVE')
+    sdkbindirs = [localdata.getVar('bindir_nativesdk'),
+                  localdata.getVar('sbindir_nativesdk'),
+                  localdata.getVar('base_bindir_nativesdk'),
+                  localdata.getVar('base_sbindir_nativesdk')]
     for line in filelist.splitlines():
         splitline = line.split()
         if len(splitline) > 5:
@@ -509,7 +509,7 @@ install_tools() {
        # (they get populated from sstate on installation)
        unfsd_path="${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/unfsd"
        if [ "${SDK_INCLUDE_TOOLCHAIN}" == "1" -a ! -e $unfsd_path ] ; then
-               binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR', True))}
+               binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR'))}
               lnr ${SDK_OUTPUT}/${SDKPATH}/$binrelpath/unfsd $unfsd_path
       fi
       touch ${SDK_OUTPUT}/${SDKPATH}/.devtoolbase
@@ -611,8 +611,8 @@ SDK_INSTALL_TARGETS = ""
 fakeroot python do_populate_sdk_ext() {
     # FIXME hopefully we can remove this restriction at some point, but uninative
     # currently forces this upon us
-    if d.getVar('SDK_ARCH', True) != d.getVar('BUILD_ARCH', True):
-        bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH', True), d.getVar('BUILD_ARCH', True)))
+    if d.getVar('SDK_ARCH') != d.getVar('BUILD_ARCH'):
+        bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH'), d.getVar('BUILD_ARCH')))
 
     d.setVar('SDK_INSTALL_TARGETS', get_sdk_install_targets(d))
     buildtools_fn = get_current_buildtools(d)
@@ -626,7 +626,7 @@ fakeroot python do_populate_sdk_ext() {
 def get_ext_sdk_depends(d):
     # Note: the deps varflag is a list not a string, so we need to specify expand=False
     deps = d.getVarFlag('do_image_complete', 'deps', False)
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
     deplist = ['%s:%s' % (pn, dep) for dep in deps]
     for task in ['do_image_complete', 'do_rootfs', 'do_build']:
         deplist.extend((d.getVarFlag(task, 'depends', True) or '').split())
@@ -637,7 +637,7 @@ python do_sdk_depends() {
     # dependencies we don't need to (e.g. buildtools-tarball) and bringing those
     # into the SDK's sstate-cache
     import oe.copy_buildsystem
-    sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc'
+    sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc'
     oe.copy_buildsystem.generate_locked_sigs(sigfile, d)
 }
 addtask sdk_depends
@@ -658,10 +658,10 @@ do_populate_sdk_ext[dirs] = "${@d.getVarFlag('do_populate_sdk', 'dirs', False)}"
 do_populate_sdk_ext[depends] = "${@d.getVarFlag('do_populate_sdk', 'depends', False)} \
                                 buildtools-tarball:do_populate_sdk uninative-tarball:do_populate_sdk \
-                                ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1' else ''} \
-                                ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1' else ''}"
+                                ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA') == '1' else ''} \
+                                ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1' else ''}"
 
-do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':do_build' for x in d.getVar('SDK_TARGETS', True).split()])}"
+do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':do_build' for x in d.getVar('SDK_TARGETS').split()])}"
 
 # Make sure code changes can result in rebuild
 do_populate_sdk_ext[vardeps] += "copy_buildsystem \
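
Condensed sketch of the SDK_EXT_TYPE logic in get_sdk_install_targets() above, with plain arguments in place of the datastore reads (the BB_TASKDEPDATA walk is omitted):

def get_targets(sdk_ext_type, sdk_targets, include_pkgdata, include_toolchain,
                images_only=False):
    targets = ''
    if images_only or sdk_ext_type != 'minimal':
        targets = sdk_targets
    if not images_only:
        if include_pkgdata:
            targets += ' meta-world-pkgdata:do_allpackagedata'
        if include_toolchain:
            targets += ' meta-extsdk-toolchain:do_populate_sysroot'
    return targets.strip()

print(get_targets('minimal', 'core-image-minimal', False, True))
# -> meta-extsdk-toolchain:do_populate_sysroot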


@@ -15,7 +15,7 @@ python prexport_handler () {
     if isinstance(e, bb.event.RecipeParsed):
         import oe.prservice
         #get all PR values for the current PRAUTOINX
-        ver = e.data.getVar('PRSERV_DUMPOPT_VERSION', True)
+        ver = e.data.getVar('PRSERV_DUMPOPT_VERSION')
         ver = ver.replace('%','-')
         retval = oe.prservice.prserv_dump_db(e.data)
         if not retval:
@@ -40,7 +40,7 @@ python prexport_handler () {
             import oe.prservice
             oe.prservice.prserv_check_avail(e.data)
             #remove dumpfile
-            bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE', True))
+            bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE'))
         elif isinstance(e, bb.event.ParseCompleted):
             import oe.prservice
             #dump meta info of tables


@@ -61,7 +61,7 @@ python () {
     d.setVarFlag('do_install_ptest_base', 'fakeroot', '1')
 
     # Remove all '*ptest_base' tasks when ptest is not enabled
-    if not(d.getVar('PTEST_ENABLED', True) == "1"):
+    if not(d.getVar('PTEST_ENABLED') == "1"):
         for i in ['do_configure_ptest_base', 'do_compile_ptest_base', 'do_install_ptest_base']:
             bb.build.deltask(i, d)
 }


@@ -4,12 +4,12 @@
 #
 
 def qemu_target_binary(data):
-    package_arch = data.getVar("PACKAGE_ARCH", True)
-    qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch, True) or "")
+    package_arch = data.getVar("PACKAGE_ARCH")
+    qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "")
     if qemu_target_binary:
         return qemu_target_binary
 
-    target_arch = data.getVar("TARGET_ARCH", True)
+    target_arch = data.getVar("TARGET_ARCH")
     if target_arch in ("i486", "i586", "i686"):
         target_arch = "i386"
     elif target_arch == "powerpc":
@@ -26,7 +26,7 @@ def qemu_wrapper_cmdline(data, rootfs_path, library_paths):
     if qemu_binary == "qemu-allarch":
         qemu_binary = "qemuwrapper"
 
-    qemu_options = data.getVar("QEMU_OPTIONS", True)
+    qemu_options = data.getVar("QEMU_OPTIONS")
 
     return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\
             + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " "
@@ -52,7 +52,7 @@ def qemu_run_binary(data, rootfs_path, binary):
 # this dance). For others (e.g. arm) a -cpu option is not necessary, since the
 # qemu-arm default CPU supports all required architecture levels.
 
-QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) or ""}"
+QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH'), True) or ""}"
 QEMU_OPTIONS[vardeps] += "QEMU_EXTRAOPTIONS_${PACKAGE_ARCH}"
 
 QEMU_EXTRAOPTIONS_ppce500v2 = " -cpu e500v2"
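
A sketch of the fallback chain in qemu_target_binary() above: an explicit QEMU_TARGET_BINARY_<arch> override wins, otherwise TARGET_ARCH is normalised to a QEMU binary name. The powerpc branch is truncated in the hunk; mapping it to "ppc" (QEMU's usual name) is an assumption here:

def qemu_target_binary(override, target_arch):
    if override:  # stand-in for the QEMU_TARGET_BINARY_${PACKAGE_ARCH} read
        return override
    if target_arch in ('i486', 'i586', 'i686'):
        target_arch = 'i386'
    elif target_arch == 'powerpc':
        target_arch = 'ppc'  # assumption: usual QEMU name for powerpc
    return 'qemu-' + target_arch

print(qemu_target_binary(None, 'i586'))  # -> qemu-i386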


@@ -55,17 +55,17 @@ do_write_qemuboot_conf[vardeps] += "${@' '.join(qemuboot_vars(d))}"
 python do_write_qemuboot_conf() {
     import configparser
 
-    qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_NAME', True))
-    qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_LINK_NAME', True))
+    qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_NAME'))
+    qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_LINK_NAME'))
 
     cf = configparser.ConfigParser()
     cf.add_section('config_bsp')
     for k in qemuboot_vars(d):
-        cf.set('config_bsp', k, '%s' % d.getVar(k, True))
+        cf.set('config_bsp', k, '%s' % d.getVar(k))
 
     # QB_DEFAULT_KERNEL's value of KERNEL_IMAGETYPE is the name of a symlink
     # to the kernel file, which hinders relocatability of the qb conf.
     # Read the link and replace it with the full filename of the target.
-    kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('QB_DEFAULT_KERNEL', True))
+    kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('QB_DEFAULT_KERNEL'))
     kernel = os.path.realpath(kernel_link)
     cf.set('config_bsp', 'QB_DEFAULT_KERNEL', kernel)
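
The conf writer above uses the stdlib configparser directly; a runnable sketch with literal values in place of the d.getVar(k) reads (keys and values are illustrative):

import configparser

cf = configparser.ConfigParser()
cf.add_section('config_bsp')
for k, v in (('QB_MACHINE', 'qemux86'), ('QB_MEM', '-m 256')):
    cf.set('config_bsp', k, v)
with open('demo.qemuboot.conf', 'w') as f:
    cf.write(f)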


@@ -1,5 +1,5 @@
 def __note(msg, d):
-    bb.note("%s: recipe_sanity: %s" % (d.getVar("P", True), msg))
+    bb.note("%s: recipe_sanity: %s" % (d.getVar("P"), msg))
 
 __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS"
 def bad_runtime_vars(cfgdata, d):
@@ -7,7 +7,7 @@ def bad_runtime_vars(cfgdata, d):
        bb.data.inherits_class("cross", d):
         return
 
-    for var in d.getVar("__recipe_sanity_badruntimevars", True).split():
+    for var in d.getVar("__recipe_sanity_badruntimevars").split():
         val = d.getVar(var, False)
         if val and val != cfgdata.get(var):
             __note("%s should be %s_${PN}" % (var, var), d)
@@ -15,11 +15,11 @@ def bad_runtime_vars(cfgdata, d):
 
 __recipe_sanity_reqvars = "DESCRIPTION"
 __recipe_sanity_reqdiffvars = ""
 def req_vars(cfgdata, d):
-    for var in d.getVar("__recipe_sanity_reqvars", True).split():
+    for var in d.getVar("__recipe_sanity_reqvars").split():
         if not d.getVar(var, False):
             __note("%s should be set" % var, d)
 
-    for var in d.getVar("__recipe_sanity_reqdiffvars", True).split():
+    for var in d.getVar("__recipe_sanity_reqdiffvars").split():
         val = d.getVar(var, False)
         cfgval = cfgdata.get(var)
@@ -38,11 +38,11 @@ def var_renames_overwrite(cfgdata, d):
 
 def incorrect_nonempty_PACKAGES(cfgdata, d):
     if bb.data.inherits_class("native", d) or \
        bb.data.inherits_class("cross", d):
-        if d.getVar("PACKAGES", True):
+        if d.getVar("PACKAGES"):
             return True
 
 def can_use_autotools_base(cfgdata, d):
-    cfg = d.getVar("do_configure", True)
+    cfg = d.getVar("do_configure")
     if not bb.data.inherits_class("autotools", d):
         return False
@@ -61,7 +61,7 @@ def can_delete_FILESPATH(cfgdata, d):
     expected = cfgdata.get("FILESPATH")
     expectedpaths = d.expand(expected)
     unexpanded = d.getVar("FILESPATH", False)
-    filespath = d.getVar("FILESPATH", True).split(":")
+    filespath = d.getVar("FILESPATH").split(":")
     filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
     for fp in filespath:
         if not fp in expectedpaths:
@@ -72,13 +72,13 @@ def can_delete_FILESPATH(cfgdata, d):
 
 def can_delete_FILESDIR(cfgdata, d):
     expected = cfgdata.get("FILESDIR")
-    #expected = "${@bb.utils.which(d.getVar('FILESPATH', True), '.')}"
+    #expected = "${@bb.utils.which(d.getVar('FILESPATH'), '.')}"
     unexpanded = d.getVar("FILESDIR", False)
     if unexpanded is None:
         return False
 
-    expanded = os.path.normpath(d.getVar("FILESDIR", True))
-    filespath = d.getVar("FILESPATH", True).split(":")
+    expanded = os.path.normpath(d.getVar("FILESDIR"))
+    filespath = d.getVar("FILESPATH").split(":")
     filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
 
     return unexpanded != expected and \
@@ -96,7 +96,7 @@ def can_delete_others(p, cfgdata, d):
             continue
 
         try:
-            expanded = d.getVar(k, True)
+            expanded = d.getVar(k)
             cfgexpanded = d.expand(cfgunexpanded)
         except bb.fetch.ParameterError:
             continue
@@ -108,8 +108,8 @@ def can_delete_others(p, cfgdata, d):
                  (p, cfgunexpanded, unexpanded, expanded))
 
 python do_recipe_sanity () {
-    p = d.getVar("P", True)
-    p = "%s %s %s" % (d.getVar("PN", True), d.getVar("PV", True), d.getVar("PR", True))
+    p = d.getVar("P")
+    p = "%s %s %s" % (d.getVar("PN"), d.getVar("PV"), d.getVar("PR"))
 
     sanitychecks = [
         (can_delete_FILESDIR, "candidate for removal of FILESDIR"),


@@ -10,7 +10,7 @@ ERR_REPORT_DIR ?= "${LOG_DIR}/error-report"
 def errorreport_getdata(e):
     import codecs
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, "error-report.txt")
     with codecs.open(datafile, 'r', 'utf-8') as f:
         data = f.read()
@@ -19,7 +19,7 @@ def errorreport_getdata(e):
 def errorreport_savedata(e, newdata, file):
     import json
     import codecs
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, file)
     with codecs.open(datafile, 'w', 'utf-8') as f:
         json.dump(newdata, f, indent=4, sort_keys=True)
@@ -29,18 +29,18 @@ python errorreport_handler () {
     import json
     import codecs
 
-    logpath = e.data.getVar('ERR_REPORT_DIR', True)
+    logpath = e.data.getVar('ERR_REPORT_DIR')
     datafile = os.path.join(logpath, "error-report.txt")
 
     if isinstance(e, bb.event.BuildStarted):
         bb.utils.mkdirhier(logpath)
         data = {}
-        machine = e.data.getVar("MACHINE", True)
+        machine = e.data.getVar("MACHINE")
         data['machine'] = machine
-        data['build_sys'] = e.data.getVar("BUILD_SYS", True)
-        data['nativelsb'] = e.data.getVar("NATIVELSBSTRING", True)
-        data['distro'] = e.data.getVar("DISTRO", True)
-        data['target_sys'] = e.data.getVar("TARGET_SYS", True)
+        data['build_sys'] = e.data.getVar("BUILD_SYS")
+        data['nativelsb'] = e.data.getVar("NATIVELSBSTRING")
+        data['distro'] = e.data.getVar("DISTRO")
+        data['target_sys'] = e.data.getVar("TARGET_SYS")
         data['failures'] = []
         data['component'] = " ".join(e.getPkgs())
         data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data))
@@ -51,7 +51,7 @@ python errorreport_handler () {
     elif isinstance(e, bb.build.TaskFailed):
         task = e.task
         taskdata={}
-        log = e.data.getVar('BB_LOGFILE', True)
+        log = e.data.getVar('BB_LOGFILE')
         taskdata['package'] = e.data.expand("${PF}")
         taskdata['task'] = task
         if log:
@@ -61,7 +61,7 @@ python errorreport_handler () {
 
             # Replace host-specific paths so the logs are cleaner
             for d in ("TOPDIR", "TMPDIR"):
-                s = e.data.getVar(d, True)
+                s = e.data.getVar(d)
                 if s:
                     logdata = logdata.replace(s, d)
@@ -92,7 +92,7 @@ python errorreport_handler () {
             bb.utils.unlockfile(lock)
             failures = jsondata['failures']
             if(len(failures) > 0):
-                filename = "error_report_" + e.data.getVar("BUILDNAME", True)+".txt"
+                filename = "error_report_" + e.data.getVar("BUILDNAME")+".txt"
                 datafile = errorreport_savedata(e, jsondata, filename)
                 bb.note("The errors for this build are stored in %s\nYou can send the errors to a reports server by running:\n  send-error-report %s [-s server]" % (datafile, datafile))
                 bb.note("The contents of these logs will be posted in public if you use the above command with the default server. Please ensure you remove any identifying or proprietary information when prompted before sending.")
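
A runnable sketch of the report payload assembled above, with literal values where the handler reads MACHINE, BUILD_SYS, etc. from the datastore (same stdlib json/codecs calls as errorreport_savedata()):

import codecs
import json

data = {
    'machine': 'qemux86',          # illustrative values throughout
    'build_sys': 'x86_64-linux',
    'nativelsb': 'universal',
    'distro': 'poky',
    'target_sys': 'i586-poky-linux',
    'failures': [],
}
with codecs.open('error-report-demo.txt', 'w', 'utf-8') as f:
    json.dump(data, f, indent=4, sort_keys=True)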


@@ -119,10 +119,10 @@ rm_work_rootfs[cleandirs] = "${WORKDIR}/rootfs"
 python () {
     if bb.data.inherits_class('kernel', d):
-        d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN", True))
+        d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN"))
 
     # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe.
-    excludes = (d.getVar("RM_WORK_EXCLUDE", True) or "").split()
-    pn = d.getVar("PN", True)
+    excludes = (d.getVar("RM_WORK_EXCLUDE") or "").split()
+    pn = d.getVar("PN")
     if pn in excludes:
         d.delVarFlag('rm_work_rootfs', 'cleandirs')
         d.delVarFlag('rm_work_populatesdk', 'cleandirs')
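
The skip logic above is a simple membership test on a whitespace-separated list; a sketch with literal strings for the two datastore reads (values are illustrative):

rm_work_exclude = 'icecc-create-env-native linux-yocto'
pn = 'linux-yocto'
excludes = (rm_work_exclude or '').split()
if pn in excludes:
    print('%s: keeping WORKDIR (listed in RM_WORK_EXCLUDE)' % pn)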


@@ -217,9 +217,9 @@ python write_image_manifest () {
     from oe.rootfs import image_list_installed_packages
     from oe.utils import format_pkg_list
 
-    deploy_dir = d.getVar('IMGDEPLOYDIR', True)
-    link_name = d.getVar('IMAGE_LINK_NAME', True)
-    manifest_name = d.getVar('IMAGE_MANIFEST', True)
+    deploy_dir = d.getVar('IMGDEPLOYDIR')
+    link_name = d.getVar('IMAGE_LINK_NAME')
+    manifest_name = d.getVar('IMAGE_MANIFEST')
     if not manifest_name:
         return


@@ -14,7 +14,7 @@ do_rootfs[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock"
 do_populate_sdk[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock"
 
 python rootfs_deb_bad_recommendations() {
-    if d.getVar("BAD_RECOMMENDATIONS", True):
+    if d.getVar("BAD_RECOMMENDATIONS"):
         bb.warn("Debian package install does not support BAD_RECOMMENDATIONS")
 }
 do_rootfs[prefuncs] += "rootfs_deb_bad_recommendations"
@@ -25,7 +25,7 @@ opkglibdir = "${localstatedir}/lib/opkg"
 
 python () {
     # Map TARGET_ARCH to Debian's ideas about architectures
-    darch = d.getVar('SDK_ARCH', True)
+    darch = d.getVar('SDK_ARCH')
     if darch in ["x86", "i486", "i586", "i686", "pentium"]:
         d.setVar('DEB_SDK_ARCH', 'i386')
     elif darch == "x86_64":
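
A sketch of the SDK_ARCH to Debian architecture mapping started above; the hunk is truncated after the x86_64 branch, so mapping it to "amd64" (Debian's name for that architecture) and passing other values through are assumptions here:

def deb_sdk_arch(darch):
    if darch in ('x86', 'i486', 'i586', 'i686', 'pentium'):
        return 'i386'
    elif darch == 'x86_64':
        return 'amd64'  # assumption: standard Debian name
    return darch        # assumption: other arches pass through

print(deb_sdk_arch('i686'))  # -> i386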


@@ -27,7 +27,7 @@ MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg"
 
 python () {
-    if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+    if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
         flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
         flags = flags.replace("do_package_write_ipk", "")
         flags = flags.replace("do_deploy", "")


@@ -25,7 +25,7 @@ do_rootfs[recrdeptask] += "do_package_write_rpm"
 do_rootfs[vardeps] += "PACKAGE_FEED_URIS"
 
 python () {
-    if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
+    if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
         flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
         flags = flags.replace("do_package_write_rpm", "")
         flags = flags.replace("do_deploy", "")


@@ -6,7 +6,7 @@ SANITY_REQUIRED_UTILITIES ?= "patch diffstat makeinfo git bzip2 tar \
     gzip gawk chrpath wget cpio perl file"
 
 def bblayers_conf_file(d):
-    return os.path.join(d.getVar('TOPDIR', True), 'conf/bblayers.conf')
+    return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf')
 
 def sanity_conf_read(fn):
     with open(fn, 'r') as f:
@@ -39,8 +39,8 @@ SANITY_DIFF_TOOL ?= "meld"
 SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
 python oecore_update_localconf() {
     # Check we are using a valid local.conf
-    current_conf = d.getVar('CONF_VERSION', True)
-    conf_version = d.getVar('LOCALCONF_VERSION', True)
+    current_conf = d.getVar('CONF_VERSION')
+    conf_version = d.getVar('LOCALCONF_VERSION')
 
     failmsg = """Your version of local.conf was generated from an older/newer version of
 local.conf.sample and there have been updates made to this file. Please compare the two
@@ -59,8 +59,8 @@ is a good way to visualise the changes."""
 SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
 python oecore_update_siteconf() {
     # If we have a site.conf, check it's valid
-    current_sconf = d.getVar('SCONF_VERSION', True)
-    sconf_version = d.getVar('SITE_CONF_VERSION', True)
+    current_sconf = d.getVar('SCONF_VERSION')
+    sconf_version = d.getVar('SITE_CONF_VERSION')
 
     failmsg = """Your version of site.conf was generated from an older version of
 site.conf.sample and there have been updates made to this file. Please compare the two
@@ -80,8 +80,8 @@ SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
 
 python oecore_update_bblayers() {
     # bblayers.conf is out of date, so see if we can resolve that
-    current_lconf = int(d.getVar('LCONF_VERSION', True))
-    lconf_version = int(d.getVar('LAYER_CONF_VERSION', True))
+    current_lconf = int(d.getVar('LCONF_VERSION'))
+    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))
 
     failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
 Please compare your file against bblayers.conf.sample and merge any changes before continuing.
@@ -141,7 +141,7 @@ is a good way to visualise the changes."""
     # Handle rename of meta-yocto -> meta-poky
     # This marks the start of separate version numbers but code is needed in OE-Core
     # for the migration, one last time.
-    layers = d.getVar('BBLAYERS', True).split()
+    layers = d.getVar('BBLAYERS').split()
     layers = [ os.path.basename(path) for path in layers ]
     if 'meta-yocto' in layers:
         found = False
@@ -172,7 +172,7 @@ is a good way to visualise the changes."""
 }
 
 def raise_sanity_error(msg, d, network_error=False):
-    if d.getVar("SANITY_USE_EVENTS", True) == "1":
+    if d.getVar("SANITY_USE_EVENTS") == "1":
         try:
             bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
         except TypeError:
@@ -198,7 +198,7 @@ def check_toolchain_tune_args(data, tune, multilib, errs):
     return found_errors
 
 def check_toolchain_args_present(data, tune, multilib, tune_errors, which):
args_set = (data.getVar("TUNE_%s" % which, True) or "").split() args_set = (data.getVar("TUNE_%s" % which) or "").split()
args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune), True) or "").split() args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune), True) or "").split()
args_missing = [] args_missing = []
@ -228,7 +228,7 @@ def check_toolchain_tune(data, tune, multilib):
localdata.setVar("OVERRIDES", overrides) localdata.setVar("OVERRIDES", overrides)
bb.data.update_data(localdata) bb.data.update_data(localdata)
bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib)) bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune, True) or "").split() features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune) or "").split()
if not features: if not features:
return "Tuning '%s' has no defined features, and cannot be used." % tune return "Tuning '%s' has no defined features, and cannot be used." % tune
valid_tunes = localdata.getVarFlags('TUNEVALID') or {} valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
@ -248,9 +248,9 @@ def check_toolchain_tune(data, tune, multilib):
bb.debug(2, " %s: %s" % (feature, valid_tunes[feature])) bb.debug(2, " %s: %s" % (feature, valid_tunes[feature]))
else: else:
tune_errors.append("Feature '%s' is not defined." % feature) tune_errors.append("Feature '%s' is not defined." % feature)
whitelist = localdata.getVar("TUNEABI_WHITELIST", True) whitelist = localdata.getVar("TUNEABI_WHITELIST")
if whitelist: if whitelist:
tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune, True) tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune)
if not tuneabi: if not tuneabi:
tuneabi = tune tuneabi = tune
if True not in [x in whitelist.split() for x in tuneabi.split()]: if True not in [x in whitelist.split() for x in tuneabi.split()]:
@ -264,13 +264,13 @@ def check_toolchain_tune(data, tune, multilib):
def check_toolchain(data): def check_toolchain(data):
tune_error_set = [] tune_error_set = []
deftune = data.getVar("DEFAULTTUNE", True) deftune = data.getVar("DEFAULTTUNE")
tune_errors = check_toolchain_tune(data, deftune, 'default') tune_errors = check_toolchain_tune(data, deftune, 'default')
if tune_errors: if tune_errors:
tune_error_set.append(tune_errors) tune_error_set.append(tune_errors)
multilibs = (data.getVar("MULTILIB_VARIANTS", True) or "").split() multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS", True) or "").split() global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()
if multilibs: if multilibs:
seen_libs = [] seen_libs = []
@ -282,7 +282,7 @@ def check_toolchain(data):
seen_libs.append(lib) seen_libs.append(lib)
if not lib in global_multilibs: if not lib in global_multilibs:
tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib) tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib, True) tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib)
if tune in seen_tunes: if tune in seen_tunes:
tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune) tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
else: else:
@ -360,10 +360,10 @@ def check_connectivity(d):
# URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
# using the same syntax as for SRC_URI. If the variable is not set # using the same syntax as for SRC_URI. If the variable is not set
# the check is skipped # the check is skipped
test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS', True) or "").split() test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
retval = "" retval = ""
bbn = d.getVar('BB_NO_NETWORK', True) bbn = d.getVar('BB_NO_NETWORK')
if bbn not in (None, '0', '1'): if bbn not in (None, '0', '1'):
return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn
@ -382,7 +382,7 @@ def check_connectivity(d):
except Exception as err: except Exception as err:
# Allow the message to be configured so that users can be # Allow the message to be configured so that users can be
# pointed to a support mechanism. # pointed to a support mechanism.
msg = data.getVar('CONNECTIVITY_CHECK_MSG', True) or "" msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
if len(msg) == 0: if len(msg) == 0:
msg = "%s.\n" % err msg = "%s.\n" % err
msg += " Please ensure your host's network is configured correctly,\n" msg += " Please ensure your host's network is configured correctly,\n"
@ -395,7 +395,7 @@ def check_connectivity(d):
def check_supported_distro(sanity_data): def check_supported_distro(sanity_data):
from fnmatch import fnmatch from fnmatch import fnmatch
tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS', True) tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
if not tested_distros: if not tested_distros:
return return
@ -418,17 +418,17 @@ def check_sanity_validmachine(sanity_data):
messages = "" messages = ""
# Check TUNE_ARCH is set # Check TUNE_ARCH is set
if sanity_data.getVar('TUNE_ARCH', True) == 'INVALID': if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n' messages = messages + 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'
# Check TARGET_OS is set # Check TARGET_OS is set
if sanity_data.getVar('TARGET_OS', True) == 'INVALID': if sanity_data.getVar('TARGET_OS') == 'INVALID':
messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n' messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'
# Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS
pkgarchs = sanity_data.getVar('PACKAGE_ARCHS', True) pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
tunepkg = sanity_data.getVar('TUNE_PKGARCH', True) tunepkg = sanity_data.getVar('TUNE_PKGARCH')
defaulttune = sanity_data.getVar('DEFAULTTUNE', True) defaulttune = sanity_data.getVar('DEFAULTTUNE')
tunefound = False tunefound = False
seen = {} seen = {}
dups = [] dups = []
@ -476,7 +476,7 @@ def check_gcc_march(sanity_data):
result = True; result = True;
if not result: if not result:
build_arch = sanity_data.getVar('BUILD_ARCH', True) build_arch = sanity_data.getVar('BUILD_ARCH')
status,res = oe.utils.getstatusoutput(sanity_data.expand("${BUILD_CC} -march=%s gcc_test.c -o gcc_test" % build_arch)) status,res = oe.utils.getstatusoutput(sanity_data.expand("${BUILD_CC} -march=%s gcc_test.c -o gcc_test" % build_arch))
if status == 0: if status == 0:
message = "BUILD_CFLAGS_append = \" -march=%s\"" % build_arch message = "BUILD_CFLAGS_append = \" -march=%s\"" % build_arch
@ -564,11 +564,11 @@ def check_perl_modules(sanity_data):
return None return None
def sanity_check_conffiles(d): def sanity_check_conffiles(d):
funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS', True).split() funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split()
for func in funcs: for func in funcs:
conffile, current_version, required_version, func = func.split(":") conffile, current_version, required_version, func = func.split(":")
if check_conf_exists(conffile, d) and d.getVar(current_version, True) is not None and \ if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \
d.getVar(current_version, True) != d.getVar(required_version, True): d.getVar(current_version) != d.getVar(required_version):
try: try:
bb.build.exec_func(func, d, pythonexception=True) bb.build.exec_func(func, d, pythonexception=True)
except NotImplementedError as e: except NotImplementedError as e:
@ -581,8 +581,8 @@ def sanity_handle_abichanges(status, d):
# #
import subprocess import subprocess
current_abi = d.getVar('OELAYOUT_ABI', True) current_abi = d.getVar('OELAYOUT_ABI')
abifile = d.getVar('SANITY_ABIFILE', True) abifile = d.getVar('SANITY_ABIFILE')
if os.path.exists(abifile): if os.path.exists(abifile):
with open(abifile, "r") as f: with open(abifile, "r") as f:
abi = f.read().strip() abi = f.read().strip()
@ -677,12 +677,12 @@ def check_sanity_version_change(status, d):
missing = missing + "GNU make," missing = missing + "GNU make,"
if not check_app_exists('${BUILD_CC}', d): if not check_app_exists('${BUILD_CC}', d):
missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC", True) missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC")
if not check_app_exists('${BUILD_CXX}', d): if not check_app_exists('${BUILD_CXX}', d):
missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX", True) missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX")
required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES', True) required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')
for util in required_utilities.split(): for util in required_utilities.split():
if not check_app_exists(util, d): if not check_app_exists(util, d):
@ -692,7 +692,7 @@ def check_sanity_version_change(status, d):
missing = missing.rstrip(',') missing = missing.rstrip(',')
status.addresult("Please install the following missing utilities: %s\n" % missing) status.addresult("Please install the following missing utilities: %s\n" % missing)
assume_provided = d.getVar('ASSUME_PROVIDED', True).split() assume_provided = d.getVar('ASSUME_PROVIDED').split()
# Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
if "diffstat-native" not in assume_provided: if "diffstat-native" not in assume_provided:
status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n') status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')
@ -715,7 +715,7 @@ def check_sanity_version_change(status, d):
status.addresult(" __sync_bool_compare_and_swap (&atomic, 2, 3);\n") status.addresult(" __sync_bool_compare_and_swap (&atomic, 2, 3);\n")
# Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS) # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
status.addresult(check_create_long_filename(tmpdir, "TMPDIR")) status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
tmpdirmode = os.stat(tmpdir).st_mode tmpdirmode = os.stat(tmpdir).st_mode
if (tmpdirmode & stat.S_ISGID): if (tmpdirmode & stat.S_ISGID):
@ -739,7 +739,7 @@ def check_sanity_version_change(status, d):
if netcheck: if netcheck:
status.network_error = True status.network_error = True
nolibs = d.getVar('NO32LIBS', True) nolibs = d.getVar('NO32LIBS')
if not nolibs: if not nolibs:
lib32path = '/lib' lib32path = '/lib'
if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ): if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
@ -748,7 +748,7 @@ def check_sanity_version_change(status, d):
if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'): if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n") status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")
bbpaths = d.getVar('BBPATH', True).split(":") bbpaths = d.getVar('BBPATH').split(":")
if ("." in bbpaths or "./" in bbpaths or "" in bbpaths): if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
status.addresult("BBPATH references the current directory, either through " \ status.addresult("BBPATH references the current directory, either through " \
"an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\ "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\
@ -758,7 +758,7 @@ def check_sanity_version_change(status, d):
"references.\n" \ "references.\n" \
"Parsed BBPATH is" + str(bbpaths)); "Parsed BBPATH is" + str(bbpaths));
oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF', True) oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF')
if not oes_bb_conf: if not oes_bb_conf:
status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n') status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')
@ -793,26 +793,26 @@ def check_sanity_everybuild(status, d):
# Check the bitbake version meets minimum requirements # Check the bitbake version meets minimum requirements
from distutils.version import LooseVersion from distutils.version import LooseVersion
minversion = d.getVar('BB_MIN_VERSION', True) minversion = d.getVar('BB_MIN_VERSION')
if (LooseVersion(bb.__version__) < LooseVersion(minversion)): if (LooseVersion(bb.__version__) < LooseVersion(minversion)):
status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__)) status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))
sanity_check_locale(d) sanity_check_locale(d)
paths = d.getVar('PATH', True).split(":") paths = d.getVar('PATH').split(":")
if "." in paths or "./" in paths or "" in paths: if "." in paths or "./" in paths or "" in paths:
status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n") status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")
# Check that the DISTRO is valid, if set # Check that the DISTRO is valid, if set
# need to take into account DISTRO renaming DISTRO # need to take into account DISTRO renaming DISTRO
distro = d.getVar('DISTRO', True) distro = d.getVar('DISTRO')
if distro and distro != "nodistro": if distro and distro != "nodistro":
if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ): if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO", True)) status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))
# Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
# set, since so much relies on it being set. # set, since so much relies on it being set.
dldir = d.getVar('DL_DIR', True) dldir = d.getVar('DL_DIR')
if not dldir: if not dldir:
status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n") status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
if os.path.exists(dldir) and not os.access(dldir, os.W_OK): if os.path.exists(dldir) and not os.access(dldir, os.W_OK):
@ -821,7 +821,7 @@ def check_sanity_everybuild(status, d):
# Check that the MACHINE is valid, if it is set # Check that the MACHINE is valid, if it is set
machinevalid = True machinevalid = True
if d.getVar('MACHINE', True): if d.getVar('MACHINE'):
if not check_conf_exists("conf/machine/${MACHINE}.conf", d): if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
status.addresult('Please set a valid MACHINE in your local.conf or environment\n') status.addresult('Please set a valid MACHINE in your local.conf or environment\n')
machinevalid = False machinevalid = False
@ -834,7 +834,7 @@ def check_sanity_everybuild(status, d):
status.addresult(check_toolchain(d)) status.addresult(check_toolchain(d))
# Check that the SDKMACHINE is valid, if it is set # Check that the SDKMACHINE is valid, if it is set
if d.getVar('SDKMACHINE', True): if d.getVar('SDKMACHINE'):
if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d): if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
status.addresult('Specified SDKMACHINE value is not valid\n') status.addresult('Specified SDKMACHINE value is not valid\n')
elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}": elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
@ -847,7 +847,7 @@ def check_sanity_everybuild(status, d):
status.addresult("Please use a umask which allows a+rx and u+rwx\n") status.addresult("Please use a umask which allows a+rx and u+rwx\n")
os.umask(omask) os.umask(omask)
if d.getVar('TARGET_ARCH', True) == "arm": if d.getVar('TARGET_ARCH') == "arm":
# This path is no longer user-readable in modern (very recent) Linux # This path is no longer user-readable in modern (very recent) Linux
try: try:
if os.path.exists("/proc/sys/vm/mmap_min_addr"): if os.path.exists("/proc/sys/vm/mmap_min_addr"):
@ -860,7 +860,7 @@ def check_sanity_everybuild(status, d):
except: except:
pass pass
oeroot = d.getVar('COREBASE', True) oeroot = d.getVar('COREBASE')
if oeroot.find('+') != -1: if oeroot.find('+') != -1:
status.addresult("Error, you have an invalid character (+) in your COREBASE directory path. Please move the installation to a directory which doesn't include any + characters.") status.addresult("Error, you have an invalid character (+) in your COREBASE directory path. Please move the installation to a directory which doesn't include any + characters.")
if oeroot.find('@') != -1: if oeroot.find('@') != -1:
@ -875,7 +875,7 @@ def check_sanity_everybuild(status, d):
'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \ 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
'bzr', 'cvs', 'npm', 'sftp', 'ssh'] 'bzr', 'cvs', 'npm', 'sftp', 'ssh']
for mirror_var in mirror_vars: for mirror_var in mirror_vars:
mirrors = (d.getVar(mirror_var, True) or '').replace('\\n', '\n').split('\n') mirrors = (d.getVar(mirror_var) or '').replace('\\n', '\n').split('\n')
for mirror_entry in mirrors: for mirror_entry in mirrors:
mirror_entry = mirror_entry.strip() mirror_entry = mirror_entry.strip()
if not mirror_entry: if not mirror_entry:
@ -914,7 +914,7 @@ def check_sanity_everybuild(status, d):
check_symlink(mirror_base, d) check_symlink(mirror_base, d)
# Check that TMPDIR hasn't changed location since the last time we were run # Check that TMPDIR hasn't changed location since the last time we were run
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
checkfile = os.path.join(tmpdir, "saved_tmpdir") checkfile = os.path.join(tmpdir, "saved_tmpdir")
if os.path.exists(checkfile): if os.path.exists(checkfile):
with open(checkfile, "r") as f: with open(checkfile, "r") as f:
@ -951,8 +951,8 @@ def check_sanity(sanity_data):
status = SanityStatus() status = SanityStatus()
tmpdir = sanity_data.getVar('TMPDIR', True) tmpdir = sanity_data.getVar('TMPDIR')
sstate_dir = sanity_data.getVar('SSTATE_DIR', True) sstate_dir = sanity_data.getVar('SSTATE_DIR')
check_symlink(sstate_dir, sanity_data) check_symlink(sstate_dir, sanity_data)
@ -976,7 +976,7 @@ def check_sanity(sanity_data):
check_sanity_everybuild(status, sanity_data) check_sanity_everybuild(status, sanity_data)
sanity_version = int(sanity_data.getVar('SANITY_VERSION', True) or 1) sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
network_error = False network_error = False
# NATIVELSBSTRING var may have been overridden with "universal", so # NATIVELSBSTRING var may have been overridden with "universal", so
# get actual host distribution id and version # get actual host distribution id and version

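One idiom recurs throughout the sanity.bbclass hunks above: list-valued variables are read as (d.getVar('VAR') or "").split(). getVar() returns None for an unset variable, so the or "" keeps .split() from raising AttributeError; dropping the True argument does not change this. For example:

    # Safe when CONNECTIVITY_CHECK_URIS is unset (getVar returns None):
    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
    # Unsafe variant, raises AttributeError when the variable is unset:
    #   test_uris = d.getVar('CONNECTIVITY_CHECK_URIS').split()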

@ -29,10 +29,10 @@ IPK_GPG_SIGNATURE_TYPE ?= 'ASC'
python () { python () {
# Check configuration # Check configuration
for var in ('IPK_GPG_NAME', 'IPK_GPG_PASSPHRASE_FILE'): for var in ('IPK_GPG_NAME', 'IPK_GPG_PASSPHRASE_FILE'):
if not d.getVar(var, True): if not d.getVar(var):
raise_sanity_error("You need to define %s in the config" % var, d) raise_sanity_error("You need to define %s in the config" % var, d)
sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE", True) sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE")
if sigtype.upper() != "ASC" and sigtype.upper() != "BIN": if sigtype.upper() != "ASC" and sigtype.upper() != "BIN":
raise_sanity_error("Bad value for IPK_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype) raise_sanity_error("Bad value for IPK_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype)
} }
@ -42,11 +42,11 @@ def sign_ipk(d, ipk_to_sign):
bb.debug(1, 'Signing ipk: %s' % ipk_to_sign) bb.debug(1, 'Signing ipk: %s' % ipk_to_sign)
signer = get_signer(d, d.getVar('IPK_GPG_BACKEND', True)) signer = get_signer(d, d.getVar('IPK_GPG_BACKEND'))
sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE', True) sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE')
is_ascii_sig = (sig_type.upper() != "BIN") is_ascii_sig = (sig_type.upper() != "BIN")
signer.detach_sign(ipk_to_sign, signer.detach_sign(ipk_to_sign,
d.getVar('IPK_GPG_NAME', True), d.getVar('IPK_GPG_NAME'),
d.getVar('IPK_GPG_PASSPHRASE_FILE', True), d.getVar('IPK_GPG_PASSPHRASE_FILE'),
armor=is_ascii_sig) armor=is_ascii_sig)

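The signing classes gate their configuration on if not d.getVar(var), which treats an unset variable (None) and an empty string alike: both fail the check. A compact sketch of that validation loop, reusing the IPK names from the hunk above:

    # Sketch of the configuration check shared by the signing classes.
    for var in ('IPK_GPG_NAME', 'IPK_GPG_PASSPHRASE_FILE'):
        if not d.getVar(var):   # None (unset) and "" (empty) both trip this
            raise_sanity_error("You need to define %s in the config" % var, d)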

@ -31,10 +31,10 @@ PACKAGE_FEED_GPG_SIGNATURE_TYPE ?= 'ASC'
python () { python () {
# Check sanity of configuration # Check sanity of configuration
for var in ('PACKAGE_FEED_GPG_NAME', 'PACKAGE_FEED_GPG_PASSPHRASE_FILE'): for var in ('PACKAGE_FEED_GPG_NAME', 'PACKAGE_FEED_GPG_PASSPHRASE_FILE'):
if not d.getVar(var, True): if not d.getVar(var):
raise_sanity_error("You need to define %s in the config" % var, d) raise_sanity_error("You need to define %s in the config" % var, d)
sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE", True) sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE")
if sigtype.upper() != "ASC" and sigtype.upper() != "BIN": if sigtype.upper() != "ASC" and sigtype.upper() != "BIN":
raise_sanity_error("Bad value for PACKAGE_FEED_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype) raise_sanity_error("Bad value for PACKAGE_FEED_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype)
} }


@ -22,11 +22,11 @@ RPM_GPG_BACKEND ?= 'local'
python () { python () {
if d.getVar('RPM_GPG_PASSPHRASE_FILE', True): if d.getVar('RPM_GPG_PASSPHRASE_FILE'):
raise_sanity_error('RPM_GPG_PASSPHRASE_FILE is replaced by RPM_GPG_PASSPHRASE', d) raise_sanity_error('RPM_GPG_PASSPHRASE_FILE is replaced by RPM_GPG_PASSPHRASE', d)
# Check configuration # Check configuration
for var in ('RPM_GPG_NAME', 'RPM_GPG_PASSPHRASE'): for var in ('RPM_GPG_NAME', 'RPM_GPG_PASSPHRASE'):
if not d.getVar(var, True): if not d.getVar(var):
raise_sanity_error("You need to define %s in the config" % var, d) raise_sanity_error("You need to define %s in the config" % var, d)
# Set the expected location of the public key # Set the expected location of the public key
@ -41,12 +41,12 @@ python sign_rpm () {
import glob import glob
from oe.gpg_sign import get_signer from oe.gpg_sign import get_signer
signer = get_signer(d, d.getVar('RPM_GPG_BACKEND', True)) signer = get_signer(d, d.getVar('RPM_GPG_BACKEND'))
rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR', True) + '/*') rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR') + '/*')
signer.sign_rpms(rpms, signer.sign_rpms(rpms,
d.getVar('RPM_GPG_NAME', True), d.getVar('RPM_GPG_NAME'),
d.getVar('RPM_GPG_PASSPHRASE', True)) d.getVar('RPM_GPG_PASSPHRASE'))
} }
do_package_index[depends] += "signing-keys:do_deploy" do_package_index[depends] += "signing-keys:do_deploy"


@ -2,7 +2,7 @@ python siteconfig_do_siteconfig () {
shared_state = sstate_state_fromvars(d) shared_state = sstate_state_fromvars(d)
if shared_state['task'] != 'populate_sysroot': if shared_state['task'] != 'populate_sysroot':
return return
if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', True), 'site_config')): if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
bb.debug(1, "No site_config directory, skipping do_siteconfig") bb.debug(1, "No site_config directory, skipping do_siteconfig")
return return
bb.build.exec_func('do_siteconfig_gencache', d) bb.build.exec_func('do_siteconfig_gencache', d)


@ -113,14 +113,14 @@ def siteinfo_data(d):
# Add in any extra user supplied data which may come from a BSP layer, removing the # Add in any extra user supplied data which may come from a BSP layer, removing the
# need to always change this class directly # need to always change this class directly
extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS", True) or "").split() extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS") or "").split()
for m in extra_siteinfo: for m in extra_siteinfo:
call = m + "(archinfo, osinfo, targetinfo, d)" call = m + "(archinfo, osinfo, targetinfo, d)"
locs = { "archinfo" : archinfo, "osinfo" : osinfo, "targetinfo" : targetinfo, "d" : d} locs = { "archinfo" : archinfo, "osinfo" : osinfo, "targetinfo" : targetinfo, "d" : d}
archinfo, osinfo, targetinfo = bb.utils.better_eval(call, locs) archinfo, osinfo, targetinfo = bb.utils.better_eval(call, locs)
hostarch = d.getVar("HOST_ARCH", True) hostarch = d.getVar("HOST_ARCH")
hostos = d.getVar("HOST_OS", True) hostos = d.getVar("HOST_OS")
target = "%s-%s" % (hostarch, hostos) target = "%s-%s" % (hostarch, hostos)
sitedata = [] sitedata = []
@ -144,7 +144,7 @@ python () {
d.setVar("SITEINFO_ENDIANNESS", "be") d.setVar("SITEINFO_ENDIANNESS", "be")
else: else:
bb.error("Unable to determine endianness for architecture '%s'" % bb.error("Unable to determine endianness for architecture '%s'" %
d.getVar("HOST_ARCH", True)) d.getVar("HOST_ARCH"))
bb.fatal("Please add your architecture to siteinfo.bbclass") bb.fatal("Please add your architecture to siteinfo.bbclass")
if "bit-32" in sitedata: if "bit-32" in sitedata:
@ -153,14 +153,14 @@ python () {
d.setVar("SITEINFO_BITS", "64") d.setVar("SITEINFO_BITS", "64")
else: else:
bb.error("Unable to determine bit size for architecture '%s'" % bb.error("Unable to determine bit size for architecture '%s'" %
d.getVar("HOST_ARCH", True)) d.getVar("HOST_ARCH"))
bb.fatal("Please add your architecture to siteinfo.bbclass") bb.fatal("Please add your architecture to siteinfo.bbclass")
} }
def siteinfo_get_files(d, aclocalcache = False): def siteinfo_get_files(d, aclocalcache = False):
sitedata = siteinfo_data(d) sitedata = siteinfo_data(d)
sitefiles = "" sitefiles = ""
for path in d.getVar("BBPATH", True).split(":"): for path in d.getVar("BBPATH").split(":"):
for element in sitedata: for element in sitedata:
filename = os.path.join(path, "site", element) filename = os.path.join(path, "site", element)
if os.path.exists(filename): if os.path.exists(filename):
@ -177,7 +177,7 @@ def siteinfo_get_files(d, aclocalcache = False):
# issues and the directory being created/removed whilst this code executes. This can happen # issues and the directory being created/removed whilst this code executes. This can happen
# when a multilib recipe is parsed along with its base variant which may be running at the time # when a multilib recipe is parsed along with its base variant which may be running at the time
# causing rare but nasty failures # causing rare but nasty failures
path_siteconfig = d.getVar('ACLOCALDIR', True) path_siteconfig = d.getVar('ACLOCALDIR')
if path_siteconfig and os.path.isdir(path_siteconfig): if path_siteconfig and os.path.isdir(path_siteconfig):
for i in os.listdir(path_siteconfig): for i in os.listdir(path_siteconfig):
if not i.endswith("_config"): if not i.endswith("_config"):

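The siteinfo hunk above keeps the extension hook that lets a BSP layer add site data without editing the class: each name listed in SITEINFO_EXTRA_DATAFUNCS is called with the three info dictionaries and must return them. A hedged sketch of such a hook; the function name and the entry it adds are hypothetical:

    # Hypothetical BSP-layer hook, registered via
    #   SITEINFO_EXTRA_DATAFUNCS += "my_bsp_siteinfo"
    def my_bsp_siteinfo(archinfo, osinfo, targetinfo, d):
        archinfo['myarch'] = "endian-little bit-32"  # hypothetical architecture entry
        return archinfo, osinfo, targetinfo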

@ -26,20 +26,20 @@ python do_spdx () {
import json, shutil import json, shutil
info = {} info = {}
info['workdir'] = d.getVar('WORKDIR', True) info['workdir'] = d.getVar('WORKDIR')
info['sourcedir'] = d.getVar('SPDX_S', True) info['sourcedir'] = d.getVar('SPDX_S')
info['pn'] = d.getVar('PN', True) info['pn'] = d.getVar('PN')
info['pv'] = d.getVar('PV', True) info['pv'] = d.getVar('PV')
info['spdx_version'] = d.getVar('SPDX_VERSION', True) info['spdx_version'] = d.getVar('SPDX_VERSION')
info['data_license'] = d.getVar('DATA_LICENSE', True) info['data_license'] = d.getVar('DATA_LICENSE')
sstatedir = d.getVar('SPDXSSTATEDIR', True) sstatedir = d.getVar('SPDXSSTATEDIR')
sstatefile = os.path.join(sstatedir, info['pn'] + info['pv'] + ".spdx") sstatefile = os.path.join(sstatedir, info['pn'] + info['pv'] + ".spdx")
manifest_dir = d.getVar('SPDX_MANIFEST_DIR', True) manifest_dir = d.getVar('SPDX_MANIFEST_DIR')
info['outfile'] = os.path.join(manifest_dir, info['pn'] + ".spdx" ) info['outfile'] = os.path.join(manifest_dir, info['pn'] + ".spdx" )
info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR', True) info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR')
info['tar_file'] = os.path.join(info['workdir'], info['pn'] + ".tar.gz" ) info['tar_file'] = os.path.join(info['workdir'], info['pn'] + ".tar.gz" )
# Make sure important dirs exist # Make sure important dirs exist
@ -74,9 +74,9 @@ python do_spdx () {
foss_license_info = cached_spdx['Licenses'] foss_license_info = cached_spdx['Licenses']
else: else:
## setup fossology command ## setup fossology command
foss_server = d.getVar('FOSS_SERVER', True) foss_server = d.getVar('FOSS_SERVER')
foss_flags = d.getVar('FOSS_WGET_FLAGS', True) foss_flags = d.getVar('FOSS_WGET_FLAGS')
foss_full_spdx = d.getVar('FOSS_FULL_SPDX', True) == "true" or False foss_full_spdx = d.getVar('FOSS_FULL_SPDX') == "true" or False
foss_command = "wget %s --post-file=%s %s"\ foss_command = "wget %s --post-file=%s %s"\
% (foss_flags, info['tar_file'], foss_server) % (foss_flags, info['tar_file'], foss_server)

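A small quirk survives in the spdx hunk above: foss_full_spdx = d.getVar('FOSS_FULL_SPDX') == "true" or False. The or False is a no-op, because the == comparison already yields a bool and also covers the None returned for an unset variable:

    # Equivalent result without the redundant 'or False':
    foss_full_spdx = d.getVar('FOSS_FULL_SPDX') == "true"
    # unset variable -> None == "true" -> False
    # "true"         -> True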

@ -11,7 +11,7 @@ def generate_sstatefn(spec, hash, d):
SSTATE_PKGARCH = "${PACKAGE_ARCH}" SSTATE_PKGARCH = "${PACKAGE_ARCH}"
SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:" SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:"
SSTATE_SWSPEC = "sstate:${PN}::${PV}:${PR}::${SSTATE_VERSION}:" SSTATE_SWSPEC = "sstate:${PN}::${PV}:${PR}::${SSTATE_VERSION}:"
SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC', True), d.getVar('BB_TASKHASH', True), d)}" SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_TASKHASH'), d)}"
SSTATE_PKG = "${SSTATE_DIR}/${SSTATE_PKGNAME}" SSTATE_PKG = "${SSTATE_DIR}/${SSTATE_PKGNAME}"
SSTATE_EXTRAPATH = "" SSTATE_EXTRAPATH = ""
SSTATE_EXTRAPATHWILDCARD = "" SSTATE_EXTRAPATHWILDCARD = ""
@ -34,7 +34,7 @@ SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}"
SSTATE_DUPWHITELIST += "${DEPLOY_DIR}/sdk/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt" SSTATE_DUPWHITELIST += "${DEPLOY_DIR}/sdk/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt"
SSTATE_SCAN_FILES ?= "*.la *-config *_config" SSTATE_SCAN_FILES ?= "*.la *-config *_config"
SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES", True).split())}" \) -type f' SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES").split())}" \) -type f'
BB_HASHFILENAME = "False ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}" BB_HASHFILENAME = "False ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}"
@ -84,7 +84,7 @@ python () {
d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${SDK_OS}")) d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${SDK_OS}"))
elif bb.data.inherits_class('cross-canadian', d): elif bb.data.inherits_class('cross-canadian', d):
d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${PACKAGE_ARCH}")) d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${PACKAGE_ARCH}"))
elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH", True) == "all": elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH") == "all":
d.setVar('SSTATE_PKGARCH', "allarch") d.setVar('SSTATE_PKGARCH', "allarch")
else: else:
d.setVar('SSTATE_MANMACH', d.expand("${PACKAGE_ARCH}")) d.setVar('SSTATE_MANMACH', d.expand("${PACKAGE_ARCH}"))
@ -100,7 +100,7 @@ python () {
scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}" scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}"
d.setVar('SSTATE_SCAN_CMD', scan_cmd) d.setVar('SSTATE_SCAN_CMD', scan_cmd)
unique_tasks = sorted(set((d.getVar('SSTATETASKS', True) or "").split())) unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split()))
d.setVar('SSTATETASKS', " ".join(unique_tasks)) d.setVar('SSTATETASKS', " ".join(unique_tasks))
for task in unique_tasks: for task in unique_tasks:
d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ") d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ")
@ -118,7 +118,7 @@ def sstate_init(task, d):
def sstate_state_fromvars(d, task = None): def sstate_state_fromvars(d, task = None):
if task is None: if task is None:
task = d.getVar('BB_CURRENTTASK', True) task = d.getVar('BB_CURRENTTASK')
if not task: if not task:
bb.fatal("sstate code running without task context?!") bb.fatal("sstate code running without task context?!")
task = task.replace("_setscene", "") task = task.replace("_setscene", "")
@ -200,7 +200,7 @@ def sstate_install(ss, d):
shareddirs.append(dstdir) shareddirs.append(dstdir)
# Check the file list for conflicts against files which already exist # Check the file list for conflicts against files which already exist
whitelist = (d.getVar("SSTATE_DUPWHITELIST", True) or "").split() whitelist = (d.getVar("SSTATE_DUPWHITELIST") or "").split()
match = [] match = []
for f in sharedfiles: for f in sharedfiles:
if os.path.exists(f): if os.path.exists(f):
@ -239,7 +239,7 @@ def sstate_install(ss, d):
"things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \ "things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \
"be to resolve the conflict. If in doubt, please ask on the mailing list, " \ "be to resolve the conflict. If in doubt, please ask on the mailing list, " \
"sharing the error and filelist above." % \ "sharing the error and filelist above." % \
(d.getVar('PN', True), "\n ".join(match))) (d.getVar('PN'), "\n ".join(match)))
bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.")
# Write out the manifest # Write out the manifest
@ -260,7 +260,7 @@ def sstate_install(ss, d):
i = d2.expand("${SSTATE_MANIFESTS}/index-${SSTATE_MANMACH}") i = d2.expand("${SSTATE_MANIFESTS}/index-${SSTATE_MANMACH}")
l = bb.utils.lockfile(i + ".lock") l = bb.utils.lockfile(i + ".lock")
filedata = d.getVar("STAMP", True) + " " + d2.getVar("SSTATE_MANFILEPREFIX", True) + " " + d.getVar("WORKDIR", True) + "\n" filedata = d.getVar("STAMP") + " " + d2.getVar("SSTATE_MANFILEPREFIX") + " " + d.getVar("WORKDIR") + "\n"
manifests = [] manifests = []
if os.path.exists(i): if os.path.exists(i):
with open(i, "r") as f: with open(i, "r") as f:
@ -275,7 +275,7 @@ def sstate_install(ss, d):
if os.path.exists(state[1]): if os.path.exists(state[1]):
oe.path.copyhardlinktree(state[1], state[2]) oe.path.copyhardlinktree(state[1], state[2])
for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split(): for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split():
# All hooks should run in the SSTATE_INSTDIR # All hooks should run in the SSTATE_INSTDIR
bb.build.exec_func(postinst, d, (sstateinst,)) bb.build.exec_func(postinst, d, (sstateinst,))
@ -298,8 +298,8 @@ def sstate_installpkg(ss, d):
oe.path.remove(dir) oe.path.remove(dir)
sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task']) sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task'])
sstatefetch = d.getVar('SSTATE_PKGNAME', True) + '_' + ss['task'] + ".tgz" sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz"
sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['task'] + ".tgz" sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz"
if not os.path.exists(sstatepkg): if not os.path.exists(sstatepkg):
pstaging_fetch(sstatefetch, sstatepkg, d) pstaging_fetch(sstatefetch, sstatepkg, d)
@ -313,12 +313,12 @@ def sstate_installpkg(ss, d):
d.setVar('SSTATE_INSTDIR', sstateinst) d.setVar('SSTATE_INSTDIR', sstateinst)
d.setVar('SSTATE_PKG', sstatepkg) d.setVar('SSTATE_PKG', sstatepkg)
if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False): if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
signer = get_signer(d, 'local') signer = get_signer(d, 'local')
if not signer.verify(sstatepkg + '.sig'): if not signer.verify(sstatepkg + '.sig'):
bb.warn("Cannot verify signature on sstate package %s" % sstatepkg) bb.warn("Cannot verify signature on sstate package %s" % sstatepkg)
for f in (d.getVar('SSTATEPREINSTFUNCS', True) or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS', True) or '').split(): for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split():
# All hooks should run in the SSTATE_INSTDIR # All hooks should run in the SSTATE_INSTDIR
bb.build.exec_func(f, d, (sstateinst,)) bb.build.exec_func(f, d, (sstateinst,))
@ -328,7 +328,7 @@ def sstate_installpkg(ss, d):
sstate_install(ss, d) sstate_install(ss, d)
for plain in ss['plaindirs']: for plain in ss['plaindirs']:
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
src = sstateinst + "/" + plain.replace(workdir, '') src = sstateinst + "/" + plain.replace(workdir, '')
dest = plain dest = plain
bb.utils.mkdirhier(src) bb.utils.mkdirhier(src)
@ -344,12 +344,12 @@ python sstate_hardcode_path_unpack () {
# sstate_hardcode_path(d) # sstate_hardcode_path(d)
import subprocess import subprocess
sstateinst = d.getVar('SSTATE_INSTDIR', True) sstateinst = d.getVar('SSTATE_INSTDIR')
fixmefn = sstateinst + "fixmepath" fixmefn = sstateinst + "fixmepath"
if os.path.isfile(fixmefn): if os.path.isfile(fixmefn):
staging = d.getVar('STAGING_DIR', True) staging = d.getVar('STAGING_DIR')
staging_target = d.getVar('STAGING_DIR_TARGET', True) staging_target = d.getVar('STAGING_DIR_TARGET')
staging_host = d.getVar('STAGING_DIR_HOST', True) staging_host = d.getVar('STAGING_DIR_HOST')
if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging) sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
@ -358,9 +358,9 @@ python sstate_hardcode_path_unpack () {
else: else:
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or '' extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
for fixmevar in extra_staging_fixmes.split(): for fixmevar in extra_staging_fixmes.split():
fixme_path = d.getVar(fixmevar, True) fixme_path = d.getVar(fixmevar)
sstate_sed_cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path) sstate_sed_cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path)
# Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
@ -377,12 +377,12 @@ python sstate_hardcode_path_unpack () {
def sstate_clean_cachefile(ss, d): def sstate_clean_cachefile(ss, d):
import oe.path import oe.path
sstatepkgfile = d.getVar('SSTATE_PATHSPEC', True) + "*_" + ss['task'] + ".tgz*" sstatepkgfile = d.getVar('SSTATE_PATHSPEC') + "*_" + ss['task'] + ".tgz*"
bb.note("Removing %s" % sstatepkgfile) bb.note("Removing %s" % sstatepkgfile)
oe.path.remove(sstatepkgfile) oe.path.remove(sstatepkgfile)
def sstate_clean_cachefiles(d): def sstate_clean_cachefiles(d):
for task in (d.getVar('SSTATETASKS', True) or "").split(): for task in (d.getVar('SSTATETASKS') or "").split():
ld = d.createCopy() ld = d.createCopy()
ss = sstate_state_fromvars(ld, task) ss = sstate_state_fromvars(ld, task)
sstate_clean_cachefile(ss, ld) sstate_clean_cachefile(ss, ld)
@ -424,7 +424,7 @@ def sstate_clean(ss, d):
import glob import glob
d2 = d.createCopy() d2 = d.createCopy()
stamp_clean = d.getVar("STAMPCLEAN", True) stamp_clean = d.getVar("STAMPCLEAN")
extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True) extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True)
if extrainf: if extrainf:
d2.setVar("SSTATE_MANMACH", extrainf) d2.setVar("SSTATE_MANMACH", extrainf)
@ -467,7 +467,7 @@ def sstate_clean(ss, d):
oe.path.remove(stfile) oe.path.remove(stfile)
# Removes the users/groups created by the package # Removes the users/groups created by the package
for cleanfunc in (d.getVar('SSTATECLEANFUNCS', True) or '').split(): for cleanfunc in (d.getVar('SSTATECLEANFUNCS') or '').split():
bb.build.exec_func(cleanfunc, d) bb.build.exec_func(cleanfunc, d)
sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX" sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX"
@ -475,13 +475,13 @@ sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX"
CLEANFUNCS += "sstate_cleanall" CLEANFUNCS += "sstate_cleanall"
python sstate_cleanall() { python sstate_cleanall() {
bb.note("Removing shared state for package %s" % d.getVar('PN', True)) bb.note("Removing shared state for package %s" % d.getVar('PN'))
manifest_dir = d.getVar('SSTATE_MANIFESTS', True) manifest_dir = d.getVar('SSTATE_MANIFESTS')
if not os.path.exists(manifest_dir): if not os.path.exists(manifest_dir):
return return
tasks = d.getVar('SSTATETASKS', True).split() tasks = d.getVar('SSTATETASKS').split()
for name in tasks: for name in tasks:
ld = d.createCopy() ld = d.createCopy()
shared_state = sstate_state_fromvars(ld, name) shared_state = sstate_state_fromvars(ld, name)
@ -497,10 +497,10 @@ python sstate_hardcode_path () {
# Note: the logic in this function needs to match the reverse logic # Note: the logic in this function needs to match the reverse logic
# in sstate_installpkg(ss, d) # in sstate_installpkg(ss, d)
staging = d.getVar('STAGING_DIR', True) staging = d.getVar('STAGING_DIR')
staging_target = d.getVar('STAGING_DIR_TARGET', True) staging_target = d.getVar('STAGING_DIR_TARGET')
staging_host = d.getVar('STAGING_DIR_HOST', True) staging_host = d.getVar('STAGING_DIR_HOST')
sstate_builddir = d.getVar('SSTATE_BUILDDIR', True) sstate_builddir = d.getVar('SSTATE_BUILDDIR')
if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
sstate_grep_cmd = "grep -l -e '%s'" % (staging) sstate_grep_cmd = "grep -l -e '%s'" % (staging)
@ -512,14 +512,14 @@ python sstate_hardcode_path () {
sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or '' extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or ''
for fixmevar in extra_staging_fixmes.split(): for fixmevar in extra_staging_fixmes.split():
fixme_path = d.getVar(fixmevar, True) fixme_path = d.getVar(fixmevar)
sstate_sed_cmd += " -e 's:%s:FIXME_%s:g'" % (fixme_path, fixmevar) sstate_sed_cmd += " -e 's:%s:FIXME_%s:g'" % (fixme_path, fixmevar)
fixmefn = sstate_builddir + "fixmepath" fixmefn = sstate_builddir + "fixmepath"
sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True) sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD')
sstate_filelist_cmd = "tee %s" % (fixmefn) sstate_filelist_cmd = "tee %s" % (fixmefn)
# fixmepath file needs relative paths, drop sstate_builddir prefix # fixmepath file needs relative paths, drop sstate_builddir prefix
@ -568,17 +568,17 @@ def sstate_package(ss, d):
os.remove(path) os.remove(path)
os.symlink(base, path) os.symlink(base, path)
tmpdir = d.getVar('TMPDIR', True) tmpdir = d.getVar('TMPDIR')
sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task']) sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task'])
sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['task'] + ".tgz" sstatepkg = d.getVar('SSTATE_PKG') + '_'+ ss['task'] + ".tgz"
bb.utils.remove(sstatebuild, recurse=True) bb.utils.remove(sstatebuild, recurse=True)
bb.utils.mkdirhier(sstatebuild) bb.utils.mkdirhier(sstatebuild)
bb.utils.mkdirhier(os.path.dirname(sstatepkg)) bb.utils.mkdirhier(os.path.dirname(sstatepkg))
for state in ss['dirs']: for state in ss['dirs']:
if not os.path.exists(state[1]): if not os.path.exists(state[1]):
continue continue
if d.getVar('SSTATE_SKIP_CREATION', True) == '1': if d.getVar('SSTATE_SKIP_CREATION') == '1':
continue continue
srcbase = state[0].rstrip("/").rsplit('/', 1)[0] srcbase = state[0].rstrip("/").rsplit('/', 1)[0]
for walkroot, dirs, files in os.walk(state[1]): for walkroot, dirs, files in os.walk(state[1]):
@ -593,7 +593,7 @@ def sstate_package(ss, d):
bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0]))
oe.path.copyhardlinktree(state[1], sstatebuild + state[0]) oe.path.copyhardlinktree(state[1], sstatebuild + state[0])
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR')
for plain in ss['plaindirs']: for plain in ss['plaindirs']:
pdir = plain.replace(workdir, sstatebuild) pdir = plain.replace(workdir, sstatebuild)
bb.utils.mkdirhier(plain) bb.utils.mkdirhier(plain)
@ -603,9 +603,9 @@ def sstate_package(ss, d):
d.setVar('SSTATE_BUILDDIR', sstatebuild) d.setVar('SSTATE_BUILDDIR', sstatebuild)
d.setVar('SSTATE_PKG', sstatepkg) d.setVar('SSTATE_PKG', sstatepkg)
for f in (d.getVar('SSTATECREATEFUNCS', True) or '').split() + \ for f in (d.getVar('SSTATECREATEFUNCS') or '').split() + \
['sstate_create_package', 'sstate_sign_package'] + \ ['sstate_create_package', 'sstate_sign_package'] + \
(d.getVar('SSTATEPOSTCREATEFUNCS', True) or '').split(): (d.getVar('SSTATEPOSTCREATEFUNCS') or '').split():
# All hooks should run in SSTATE_BUILDDIR. # All hooks should run in SSTATE_BUILDDIR.
bb.build.exec_func(f, d, (sstatebuild,)) bb.build.exec_func(f, d, (sstatebuild,))
@ -617,7 +617,7 @@ def pstaging_fetch(sstatefetch, sstatepkg, d):
import bb.fetch2 import bb.fetch2
# Only try and fetch if the user has configured a mirror # Only try and fetch if the user has configured a mirror
mirrors = d.getVar('SSTATE_MIRRORS', True) mirrors = d.getVar('SSTATE_MIRRORS')
if not mirrors: if not mirrors:
return return
@ -635,14 +635,14 @@ def pstaging_fetch(sstatefetch, sstatepkg, d):
# if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK, # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
# we'll want to allow network access for the current set of fetches. # we'll want to allow network access for the current set of fetches.
if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1": if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1":
localdata.delVar('BB_NO_NETWORK') localdata.delVar('BB_NO_NETWORK')
# Try a fetch from the sstate mirror, if it fails just return and # Try a fetch from the sstate mirror, if it fails just return and
# we will build the package # we will build the package
uris = ['file://{0};downloadfilename={0}'.format(sstatefetch), uris = ['file://{0};downloadfilename={0}'.format(sstatefetch),
'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)] 'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)]
if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False): if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False):
uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)] uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)]
for srcuri in uris: for srcuri in uris:
@ -671,7 +671,7 @@ python sstate_task_postfunc () {
sstate_install(shared_state, d) sstate_install(shared_state, d)
for intercept in shared_state['interceptfuncs']: for intercept in shared_state['interceptfuncs']:
bb.build.exec_func(intercept, d, (d.getVar("WORKDIR", True),)) bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),))
omask = os.umask(0o002) omask = os.umask(0o002)
if omask != 0o002: if omask != 0o002:
bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask)
@@ -709,13 +709,13 @@ sstate_create_package () {
 python sstate_sign_package () {
     from oe.gpg_sign import get_signer
 
-    if d.getVar('SSTATE_SIG_KEY', True):
+    if d.getVar('SSTATE_SIG_KEY'):
         signer = get_signer(d, 'local')
-        sstate_pkg = d.getVar('SSTATE_PKG', True)
+        sstate_pkg = d.getVar('SSTATE_PKG')
         if os.path.exists(sstate_pkg + '.sig'):
             os.unlink(sstate_pkg + '.sig')
         signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None,
-                           d.getVar('SSTATE_SIG_PASSPHRASE', True), armor=False)
+                           d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False)
 }
 
 #
@@ -746,7 +746,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
         splithashfn = sq_hashfn[task].split(" ")
         spec = splithashfn[1]
         if splithashfn[0] == "True":
-            extrapath = d.getVar("NATIVELSBSTRING", True) + "/"
+            extrapath = d.getVar("NATIVELSBSTRING") + "/"
         else:
             extrapath = ""
 
@@ -785,7 +785,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
             missed.append(task)
             bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile)
 
-    mirrors = d.getVar("SSTATE_MIRRORS", True)
+    mirrors = d.getVar("SSTATE_MIRRORS")
     if mirrors:
         # Copy the data object and override DL_DIR and SRC_URI
         localdata = bb.data.createCopy(d)
@@ -801,7 +801,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
         # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
         # we'll want to allow network access for the current set of fetches.
-        if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1":
+        if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1":
             localdata.delVar('BB_NO_NETWORK')
 
     whitelist = bb.runqueue.get_setscene_enforce_whitelist(d)
@@ -868,7 +868,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False):
     if whitelist and missing:
         bb.fatal('Required artifacts were unavailable - exiting')
 
-    inheritlist = d.getVar("INHERIT", True)
+    inheritlist = d.getVar("INHERIT")
     if "toaster" in inheritlist:
         evdata = {'missed': [], 'found': []};
         for task in missed:
@@ -977,15 +977,15 @@ sstate_eventhandler[eventmask] = "bb.build.TaskSucceeded"
 
 python sstate_eventhandler() {
     d = e.data
     # When we write an sstate package we rewrite the SSTATE_PKG
-    spkg = d.getVar('SSTATE_PKG', True)
+    spkg = d.getVar('SSTATE_PKG')
     if not spkg.endswith(".tgz"):
-        taskname = d.getVar("BB_RUNTASK", True)[3:]
-        spec = d.getVar('SSTATE_PKGSPEC', True)
-        swspec = d.getVar('SSTATE_SWSPEC', True)
+        taskname = d.getVar("BB_RUNTASK")[3:]
+        spec = d.getVar('SSTATE_PKGSPEC')
+        swspec = d.getVar('SSTATE_SWSPEC')
         if taskname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and swspec:
             d.setVar("SSTATE_PKGSPEC", "${SSTATE_SWSPEC}")
             d.setVar("SSTATE_EXTRAPATH", "")
-        sstatepkg = d.getVar('SSTATE_PKG', True)
+        sstatepkg = d.getVar('SSTATE_PKG')
         bb.siggen.dump_this_task(sstatepkg + '_' + taskname + ".tgz" ".siginfo", d)
 }
@@ -1004,7 +1004,7 @@ python sstate_eventhandler2() {
     stamps = e.stamps.values()
     removeworkdir = (d.getVar("SSTATE_PRUNE_OBSOLETEWORKDIR", False) == "1")
     seen = []
-    for a in d.getVar("SSTATE_ARCHS", True).split():
+    for a in d.getVar("SSTATE_ARCHS").split():
         toremove = []
         i = d.expand("${SSTATE_MANIFESTS}/index-" + a)
         if not os.path.exists(i):

meta/classes/staging.bbclass

@@ -69,8 +69,8 @@ sysroot_stage_all() {
 
 python sysroot_strip () {
     import stat, errno
 
-    dvar = d.getVar('SYSROOT_DESTDIR', True)
-    pn = d.getVar('PN', True)
+    dvar = d.getVar('SYSROOT_DESTDIR')
+    pn = d.getVar('PN')
 
     os.chdir(dvar)
@@ -103,9 +103,9 @@ python sysroot_strip () {
 
     elffiles = {}
     inodes = {}
-    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True))
-    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True))
-    if (d.getVar('INHIBIT_SYSROOT_STRIP', True) != '1'):
+    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
+    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
+    if (d.getVar('INHIBIT_SYSROOT_STRIP') != '1'):
         #
         # First lets figure out all of the files we may have to process
         #
@@ -136,7 +136,7 @@ python sysroot_strip () {
                     elf_file = isELF(file)
                     if elf_file & 1:
                         if elf_file & 2:
-                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split():
+                            if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                                 bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                             else:
                                 bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn))
@@ -154,7 +154,7 @@ python sysroot_strip () {
         #
         # Now strip them (in parallel)
         #
-        strip = d.getVar("STRIP", True)
+        strip = d.getVar("STRIP")
         sfiles = []
         for file in elffiles:
             elf_file = int(elffiles[file])
@@ -211,13 +211,13 @@ def sysroot_checkhashes(covered, tasknames, fnids, fns, d, invalidtasks = None):
 python do_populate_sysroot () {
     bb.build.exec_func("sysroot_stage_all", d)
     bb.build.exec_func("sysroot_strip", d)
-    for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS', True) or '').split():
+    for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS') or '').split():
         bb.build.exec_func(f, d)
-    pn = d.getVar("PN", True)
-    multiprov = d.getVar("MULTI_PROVIDER_WHITELIST", True).split()
+    pn = d.getVar("PN")
+    multiprov = d.getVar("MULTI_PROVIDER_WHITELIST").split()
     provdir = d.expand("${SYSROOT_DESTDIR}${base_prefix}/sysroot-providers/")
     bb.utils.mkdirhier(provdir)
-    for p in d.getVar("PROVIDES", True).split():
+    for p in d.getVar("PROVIDES").split():
         if p in multiprov:
             continue
         p = p.replace("/", "_")

meta/classes/syslinux.bbclass

@@ -84,12 +84,12 @@ python build_syslinux_cfg () {
     import copy
     import sys
 
-    workdir = d.getVar('WORKDIR', True)
+    workdir = d.getVar('WORKDIR')
     if not workdir:
         bb.error("WORKDIR not defined, unable to package")
         return
 
-    labels = d.getVar('LABELS', True)
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -98,7 +98,7 @@ python build_syslinux_cfg () {
         bb.debug(1, "No labels, nothing to do")
         return
 
-    cfile = d.getVar('SYSLINUX_CFG', True)
+    cfile = d.getVar('SYSLINUX_CFG')
     if not cfile:
         bb.fatal('Unable to read SYSLINUX_CFG')
@@ -109,39 +109,39 @@ python build_syslinux_cfg () {
     cfgfile.write('# Automatically created by OE\n')
 
-    opts = d.getVar('SYSLINUX_OPTS', True)
+    opts = d.getVar('SYSLINUX_OPTS')
     if opts:
         for opt in opts.split(';'):
             cfgfile.write('%s\n' % opt)
 
-    allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS', True)
+    allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS')
     if allowoptions:
         cfgfile.write('ALLOWOPTIONS %s\n' % allowoptions)
     else:
         cfgfile.write('ALLOWOPTIONS 1\n')
 
-    syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE', True)
-    syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY', True)
-    syslinux_serial = d.getVar('SYSLINUX_SERIAL', True)
+    syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE')
+    syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY')
+    syslinux_serial = d.getVar('SYSLINUX_SERIAL')
     if syslinux_serial:
         cfgfile.write('SERIAL %s\n' % syslinux_serial)
 
-    menu = (d.getVar('AUTO_SYSLINUXMENU', True) == "1")
+    menu = (d.getVar('AUTO_SYSLINUXMENU') == "1")
 
     if menu and syslinux_serial:
         cfgfile.write('DEFAULT Graphics console %s\n' % (labels.split()[0]))
     else:
         cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
 
-    timeout = d.getVar('SYSLINUX_TIMEOUT', True)
+    timeout = d.getVar('SYSLINUX_TIMEOUT')
 
     if timeout:
         cfgfile.write('TIMEOUT %s\n' % timeout)
     else:
         cfgfile.write('TIMEOUT 50\n')
 
-    prompt = d.getVar('SYSLINUX_PROMPT', True)
+    prompt = d.getVar('SYSLINUX_PROMPT')
     if prompt:
         cfgfile.write('PROMPT %s\n' % prompt)
     else:
@@ -151,14 +151,14 @@ python build_syslinux_cfg () {
         cfgfile.write('ui vesamenu.c32\n')
         cfgfile.write('menu title Select kernel options and boot kernel\n')
         cfgfile.write('menu tabmsg Press [Tab] to edit, [Return] to select\n')
 
-        splash = d.getVar('SYSLINUX_SPLASH', True)
+        splash = d.getVar('SYSLINUX_SPLASH')
         if splash:
             cfgfile.write('menu background splash.lss\n')
 
     for label in labels.split():
         localdata = bb.data.createCopy(d)
 
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
@@ -170,19 +170,19 @@ python build_syslinux_cfg () {
         btypes = [ [ "Graphics console ", syslinux_default_console ],
             [ "Serial console ", syslinux_serial_tty ] ]
 
-        root= d.getVar('SYSLINUX_ROOT', True)
+        root= d.getVar('SYSLINUX_ROOT')
         if not root:
             bb.fatal('SYSLINUX_ROOT not defined')
 
         for btype in btypes:
             cfgfile.write('LABEL %s%s\nKERNEL /vmlinuz\n' % (btype[0], label))
 
-            exargs = d.getVar('SYSLINUX_KERNEL_ARGS', True)
+            exargs = d.getVar('SYSLINUX_KERNEL_ARGS')
             if exargs:
                 btype[1] += " " + exargs
 
-            append = localdata.getVar('APPEND', True)
-            initrd = localdata.getVar('INITRD', True)
+            append = localdata.getVar('APPEND')
+            initrd = localdata.getVar('INITRD')
 
             append = root + " " + append
 
             cfgfile.write('APPEND ')

meta/classes/systemd-boot.bbclass

@@ -63,8 +63,8 @@ efi_hddimg_populate() {
 }
 
 python build_efi_cfg() {
-    s = d.getVar("S", True)
-    labels = d.getVar('LABELS', True)
+    s = d.getVar("S")
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -73,7 +73,7 @@ python build_efi_cfg() {
         bb.debug(1, "No labels, nothing to do")
         return
 
-    cfile = d.getVar('SYSTEMD_BOOT_CFG', True)
+    cfile = d.getVar('SYSTEMD_BOOT_CFG')
     try:
         cfgfile = open(cfile, 'w')
     except OSError:
@@ -81,7 +81,7 @@ python build_efi_cfg() {
     cfgfile.write('# Automatically created by OE\n')
     cfgfile.write('default %s\n' % (labels.split()[0]))
-    timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT', True)
+    timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT')
     if timeout:
         cfgfile.write('timeout %s\n' % timeout)
     else:
@@ -91,7 +91,7 @@ python build_efi_cfg() {
     for label in labels.split():
         localdata = d.createCopy()
 
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
@@ -107,8 +107,8 @@ python build_efi_cfg() {
         entrycfg.write('title %s\n' % label)
         entrycfg.write('linux /vmlinuz\n')
 
-        append = localdata.getVar('APPEND', True)
-        initrd = localdata.getVar('INITRD', True)
+        append = localdata.getVar('APPEND')
+        initrd = localdata.getVar('INITRD')
 
         if initrd:
             entrycfg.write('initrd /initrd\n')

meta/classes/systemd.bbclass

@@ -71,12 +71,12 @@ python systemd_populate_packages() {
     def get_package_var(d, var, pkg):
         val = (d.getVar('%s_%s' % (var, pkg), True) or "").strip()
         if val == "":
-            val = (d.getVar(var, True) or "").strip()
+            val = (d.getVar(var) or "").strip()
         return val
 
     # Check if systemd-packages already included in PACKAGES
     def systemd_check_package(pkg_systemd):
-        packages = d.getVar('PACKAGES', True)
+        packages = d.getVar('PACKAGES')
         if not pkg_systemd in packages.split():
             bb.error('%s does not appear in package list, please add it' % pkg_systemd)
@@ -90,23 +90,23 @@ python systemd_populate_packages() {
         localdata.prependVar("OVERRIDES", pkg + ":")
         bb.data.update_data(localdata)
 
-        postinst = d.getVar('pkg_postinst_%s' % pkg, True)
+        postinst = d.getVar('pkg_postinst_%s' % pkg)
         if not postinst:
             postinst = '#!/bin/sh\n'
-        postinst += localdata.getVar('systemd_postinst', True)
+        postinst += localdata.getVar('systemd_postinst')
         d.setVar('pkg_postinst_%s' % pkg, postinst)
 
-        prerm = d.getVar('pkg_prerm_%s' % pkg, True)
+        prerm = d.getVar('pkg_prerm_%s' % pkg)
         if not prerm:
             prerm = '#!/bin/sh\n'
-        prerm += localdata.getVar('systemd_prerm', True)
+        prerm += localdata.getVar('systemd_prerm')
         d.setVar('pkg_prerm_%s' % pkg, prerm)
 
     # Add files to FILES_*-systemd if existent and not already done
     def systemd_append_file(pkg_systemd, file_append):
         appended = False
-        if os.path.exists(oe.path.join(d.getVar("D", True), file_append)):
+        if os.path.exists(oe.path.join(d.getVar("D"), file_append)):
             var_name = "FILES_" + pkg_systemd
             files = d.getVar(var_name, False) or ""
             if file_append not in files.split():
@@ -118,7 +118,7 @@ python systemd_populate_packages() {
     def systemd_add_files_and_parse(pkg_systemd, path, service, keys):
         # avoid infinite recursion
         if systemd_append_file(pkg_systemd, oe.path.join(path, service)):
-            fullpath = oe.path.join(d.getVar("D", True), path, service)
+            fullpath = oe.path.join(d.getVar("D"), path, service)
             if service.find('.service') != -1:
                 # for *.service add *@.service
                 service_base = service.replace('.service', '')
@@ -141,9 +141,9 @@ python systemd_populate_packages() {
     # Check service-files and call systemd_add_files_and_parse for each entry
     def systemd_check_services():
-        searchpaths = [oe.path.join(d.getVar("sysconfdir", True), "systemd", "system"),]
-        searchpaths.append(d.getVar("systemd_system_unitdir", True))
-        systemd_packages = d.getVar('SYSTEMD_PACKAGES', True)
+        searchpaths = [oe.path.join(d.getVar("sysconfdir"), "systemd", "system"),]
+        searchpaths.append(d.getVar("systemd_system_unitdir"))
+        systemd_packages = d.getVar('SYSTEMD_PACKAGES')
 
         keys = 'Also'
         # scan for all in SYSTEMD_SERVICE[]
@@ -158,11 +158,11 @@ python systemd_populate_packages() {
             base = re.sub('@[^.]+.', '@.', service)
 
             for path in searchpaths:
-                if os.path.exists(oe.path.join(d.getVar("D", True), path, service)):
+                if os.path.exists(oe.path.join(d.getVar("D"), path, service)):
                     path_found = path
                     break
                 elif base is not None:
-                    if os.path.exists(oe.path.join(d.getVar("D", True), path, base)):
+                    if os.path.exists(oe.path.join(d.getVar("D"), path, base)):
                         path_found = path
                         break
@@ -172,10 +172,10 @@ python systemd_populate_packages() {
                 bb.fatal("SYSTEMD_SERVICE_%s value %s does not exist" % (pkg_systemd, service))
 
     # Run all modifications once when creating package
-    if os.path.exists(d.getVar("D", True)):
-        for pkg in d.getVar('SYSTEMD_PACKAGES', True).split():
+    if os.path.exists(d.getVar("D")):
+        for pkg in d.getVar('SYSTEMD_PACKAGES').split():
             systemd_check_package(pkg)
-            if d.getVar('SYSTEMD_SERVICE_' + pkg, True):
+            if d.getVar('SYSTEMD_SERVICE_' + pkg):
                 systemd_generate_package_scripts(pkg)
         systemd_check_services()
 }
@@ -185,7 +185,7 @@ PACKAGESPLITFUNCS_prepend = "systemd_populate_packages "
 python rm_systemd_unitdir (){
     import shutil
     if not bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d):
-        systemd_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_unitdir', True))
+        systemd_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_unitdir'))
         if os.path.exists(systemd_unitdir):
             shutil.rmtree(systemd_unitdir)
         systemd_libdir = os.path.dirname(systemd_unitdir)
@@ -196,12 +196,12 @@ do_install[postfuncs] += "rm_systemd_unitdir "
 python rm_sysvinit_initddir (){
     import shutil
-    sysv_initddir = oe.path.join(d.getVar("D", True), (d.getVar('INIT_D_DIR', True) or "/etc/init.d"))
+    sysv_initddir = oe.path.join(d.getVar("D"), (d.getVar('INIT_D_DIR') or "/etc/init.d"))
 
     if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and \
         not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d) and \
         os.path.exists(sysv_initddir):
-        systemd_system_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_system_unitdir', True))
+        systemd_system_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_system_unitdir'))
 
         # If systemd_system_unitdir contains anything, delete sysv_initddir
         if (os.path.exists(systemd_system_unitdir) and os.listdir(systemd_system_unitdir)):

Some files were not shown because too many files have changed in this diff
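For readers unfamiliar with the datastore API change these hunks apply mechanically, here is a minimal, hypothetical sketch of the new calling convention. MiniDataStore is an invented stand-in, not BitBake's actual DataSmart implementation, and its ${VAR} expansion is deliberately naive; it only illustrates why getVar(name, True) and getVar(name) are now equivalent, while getVar(name, False) still returns the raw, unexpanded value.

    import re

    class MiniDataStore:
        """Toy variable store; expand defaults to True, as getVar() now does."""

        def __init__(self):
            self._vars = {}

        def setVar(self, name, value):
            self._vars[name] = value

        def getVar(self, name, expand=True):
            value = self._vars.get(name)
            if expand and isinstance(value, str):
                # Recursively substitute ${NAME} references -- a naive stand-in
                # for BitBake's real expansion machinery.
                value = re.sub(r"\$\{(\w+)\}",
                               lambda m: self.getVar(m.group(1)) or "", value)
            return value

    d = MiniDataStore()
    d.setVar("TOPDIR", "/build")
    d.setVar("WORKDIR", "${TOPDIR}/work")

    assert d.getVar("WORKDIR", True) == "/build/work"      # old spelling
    assert d.getVar("WORKDIR") == "/build/work"            # new spelling, same result
    assert d.getVar("WORKDIR", False) == "${TOPDIR}/work"  # raw value still available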