getVar/setVar cleanups

Complete the bb.data.getVar/setVar replacements with accesses
directly to the data store object.

(From OE-Core rev: 2864ff6a4b3c3f9b3bbb6d2597243cc5d3715939)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Richard Purdie 2011-11-25 14:25:16 +00:00
parent 71fded5145
commit 0a434ac101
27 changed files with 152 additions and 159 deletions

View File

@ -360,12 +360,12 @@ python () {
d.setVarFlag('do_compile', 'umask', 022) d.setVarFlag('do_compile', 'umask', 022)
deps = (d.getVarFlag('do_install', 'depends') or "").split() deps = (d.getVarFlag('do_install', 'depends') or "").split()
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_install', 'depends', " ".join(deps),d) d.setVarFlag('do_install', 'depends', " ".join(deps))
d.setVarFlag('do_install', 'fakeroot', 1) d.setVarFlag('do_install', 'fakeroot', 1)
d.setVarFlag('do_install', 'umask', 022) d.setVarFlag('do_install', 'umask', 022)
deps = (d.getVarFlag('do_package', 'depends') or "").split() deps = (d.getVarFlag('do_package', 'depends') or "").split()
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package', 'depends', " ".join(deps),d) d.setVarFlag('do_package', 'depends', " ".join(deps))
d.setVarFlag('do_package', 'fakeroot', 1) d.setVarFlag('do_package', 'fakeroot', 1)
d.setVarFlag('do_package', 'umask', 022) d.setVarFlag('do_package', 'umask', 022)
d.setVarFlag('do_package_setscene', 'fakeroot', 1) d.setVarFlag('do_package_setscene', 'fakeroot', 1)

View File

@ -20,7 +20,7 @@ python () {
sdkarchs = [] sdkarchs = []
for arch in archs: for arch in archs:
sdkarchs.append(arch + '-nativesdk') sdkarchs.append(arch + '-nativesdk')
bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d) d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
} }
MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}" MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}"

View File

@ -31,31 +31,31 @@ python do_distrodata_np() {
if pn.find("-native") != -1: if pn.find("-native") != -1:
pnstripped = pn.split("-native") pnstripped = pn.split("-native")
bb.note("Native Split: %s" % pnstripped) bb.note("Native Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-nativesdk") != -1: if pn.find("-nativesdk") != -1:
pnstripped = pn.split("-nativesdk") pnstripped = pn.split("-nativesdk")
bb.note("Native Split: %s" % pnstripped) bb.note("Native Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-cross") != -1: if pn.find("-cross") != -1:
pnstripped = pn.split("-cross") pnstripped = pn.split("-cross")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-crosssdk") != -1: if pn.find("-crosssdk") != -1:
pnstripped = pn.split("-crosssdk") pnstripped = pn.split("-crosssdk")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-initial") != -1: if pn.find("-initial") != -1:
pnstripped = pn.split("-initial") pnstripped = pn.split("-initial")
bb.note("initial Split: %s" % pnstripped) bb.note("initial Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
"""generate package information from .bb file""" """generate package information from .bb file"""
@ -130,19 +130,19 @@ python do_distrodata() {
if pn.find("-native") != -1: if pn.find("-native") != -1:
pnstripped = pn.split("-native") pnstripped = pn.split("-native")
bb.note("Native Split: %s" % pnstripped) bb.note("Native Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-cross") != -1: if pn.find("-cross") != -1:
pnstripped = pn.split("-cross") pnstripped = pn.split("-cross")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pn.find("-initial") != -1: if pn.find("-initial") != -1:
pnstripped = pn.split("-initial") pnstripped = pn.split("-initial")
bb.note("initial Split: %s" % pnstripped) bb.note("initial Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
"""generate package information from .bb file""" """generate package information from .bb file"""
@ -308,8 +308,8 @@ python do_checkpkg() {
which is designed for check purpose but we override check command for our own purpose which is designed for check purpose but we override check command for our own purpose
""" """
ld = bb.data.createCopy(d) ld = bb.data.createCopy(d)
bb.data.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \ d.setVar('CHECKCOMMAND_wget', "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12\" '${URI}'" \
% tmpf.name, d) % tmpf.name)
bb.data.update_data(ld) bb.data.update_data(ld)
try: try:
@ -452,19 +452,19 @@ python do_checkpkg() {
if pname.find("-native") != -1: if pname.find("-native") != -1:
pnstripped = pname.split("-native") pnstripped = pname.split("-native")
bb.note("Native Split: %s" % pnstripped) bb.note("Native Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pname.find("-cross") != -1: if pname.find("-cross") != -1:
pnstripped = pname.split("-cross") pnstripped = pname.split("-cross")
bb.note("cross Split: %s" % pnstripped) bb.note("cross Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
if pname.find("-initial") != -1: if pname.find("-initial") != -1:
pnstripped = pname.split("-initial") pnstripped = pname.split("-initial")
bb.note("initial Split: %s" % pnstripped) bb.note("initial Split: %s" % pnstripped)
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
pdesc = localdata.getVar('DESCRIPTION', True) pdesc = localdata.getVar('DESCRIPTION', True)

View File

@ -45,7 +45,7 @@ python populate_packages_append () {
schemas.append(f) schemas.append(f)
if schemas != []: if schemas != []:
bb.note("adding gconf postinst and prerm scripts to %s" % pkg) bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d) d.setVar('SCHEMA_FILES', " ".join(schemas))
postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1) postinst = d.getVar('pkg_postinst_%s' % pkg, 1) or d.getVar('pkg_postinst', 1)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'

View File

@ -53,7 +53,7 @@ python() {
if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d): if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d):
deps = (d.getVarFlag('do_setscene', 'depends') or "").split() deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
deps.append('strace-native:do_populate_sysroot') deps.append('strace-native:do_populate_sysroot')
bb.data.setVarFlag('do_setscene', 'depends', " ".join(deps), d) d.setVarFlag('do_setscene', 'depends', " ".join(deps))
logdir = bb.data.expand("${TRACE_LOGDIR}", d) logdir = bb.data.expand("${TRACE_LOGDIR}", d)
bb.utils.mkdirhier(logdir) bb.utils.mkdirhier(logdir)
else: else:

View File

@ -437,7 +437,7 @@ python populate_packages_prepend () {
else: else:
rdepends = [] rdepends = []
rdepends.extend(get_dependencies(file, pattern, format)) rdepends.extend(get_dependencies(file, pattern, format))
bb.data.setVar('RDEPENDS_' + pkg, ' '.join(rdepends), d) d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends))
module_deps = parse_depmod() module_deps = parse_depmod()
module_regex = '^(.*)\.k?o$' module_regex = '^(.*)\.k?o$'
@ -464,10 +464,10 @@ python populate_packages_prepend () {
for pkg in packages[1:]: for pkg in packages[1:]:
if not pkg in blacklist and not pkg in metapkg_rdepends: if not pkg in blacklist and not pkg in metapkg_rdepends:
metapkg_rdepends.append(pkg) metapkg_rdepends.append(pkg)
bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package') d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
packages.append(metapkg) packages.append(metapkg)
bb.data.setVar('PACKAGES', ' '.join(packages), d) d.setVar('PACKAGES', ' '.join(packages))
} }
# Support checking the kernel size since some kernels need to reside in partitions # Support checking the kernel size since some kernels need to reside in partitions

View File

@ -10,7 +10,7 @@
GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice" GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice"
python __anonymous () { python __anonymous () {
enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", 1) enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True)
pn = d.getVar("PN", True) pn = d.getVar("PN", True)
if pn.endswith("-initial"): if pn.endswith("-initial"):
@ -19,15 +19,15 @@ python __anonymous () {
if enabled and int(enabled): if enabled and int(enabled):
import re import re
target_arch = d.getVar("TARGET_ARCH", 1) target_arch = d.getVar("TARGET_ARCH", True)
binary_arches = d.getVar("BINARY_LOCALE_ARCHES", 1) or "" binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or ""
use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "" use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or ""
for regexp in binary_arches.split(" "): for regexp in binary_arches.split(" "):
r = re.compile(regexp) r = re.compile(regexp)
if r.match(target_arch): if r.match(target_arch):
depends = d.getVar("DEPENDS", 1) depends = d.getVar("DEPENDS", True)
if use_cross_localedef == "1" : if use_cross_localedef == "1" :
depends = "%s cross-localedef-native" % depends depends = "%s cross-localedef-native" % depends
else: else:
@ -109,19 +109,19 @@ inherit qemu
python package_do_split_gconvs () { python package_do_split_gconvs () {
import os, re import os, re
if (d.getVar('PACKAGE_NO_GCONV', 1) == '1'): if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
bb.note("package requested not splitting gconvs") bb.note("package requested not splitting gconvs")
return return
if not d.getVar('PACKAGES', 1): if not d.getVar('PACKAGES', True):
return return
bpn = d.getVar('BPN', 1) bpn = d.getVar('BPN', True)
libdir = d.getVar('libdir', 1) libdir = d.getVar('libdir', True)
if not libdir: if not libdir:
bb.error("libdir not defined") bb.error("libdir not defined")
return return
datadir = d.getVar('datadir', 1) datadir = d.getVar('datadir', True)
if not datadir: if not datadir:
bb.error("datadir not defined") bb.error("datadir not defined")
return return
@ -144,9 +144,9 @@ python package_do_split_gconvs () {
deps.append(dp) deps.append(dp)
f.close() f.close()
if deps != []: if deps != []:
bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d) d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc': if bpn != 'glibc':
bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d) d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \ do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
description='gconv module for character set %s', hook=calc_gconv_deps, \ description='gconv module for character set %s', hook=calc_gconv_deps, \
@ -165,9 +165,9 @@ python package_do_split_gconvs () {
deps.append(dp) deps.append(dp)
f.close() f.close()
if deps != []: if deps != []:
bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d) d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc': if bpn != 'glibc':
bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d) d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \ do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='') description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')
@ -185,23 +185,23 @@ python package_do_split_gconvs () {
deps.append(dp) deps.append(dp)
f.close() f.close()
if deps != []: if deps != []:
bb.data.setVar('RDEPENDS_%s' % pkg, " ".join(deps), d) d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
if bpn != 'glibc': if bpn != 'glibc':
bb.data.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'), d) d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))
do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \ do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
description='locale definition for %s', hook=calc_locale_deps, extra_depends='') description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
bb.data.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv', d) d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')
use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", 1) use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)
dot_re = re.compile("(.*)\.(.*)") dot_re = re.compile("(.*)\.(.*)")
#GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales #GLIBC_GENERATE_LOCALES var specifies which locales to be supported, empty or "all" means all locales
if use_bin != "precompiled": if use_bin != "precompiled":
supported = d.getVar('GLIBC_GENERATE_LOCALES', 1) supported = d.getVar('GLIBC_GENERATE_LOCALES', True)
if not supported or supported == "all": if not supported or supported == "all":
f = open(base_path_join(d.getVar('WORKDIR', 1), "SUPPORTED"), "r") f = open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED"), "r")
supported = f.readlines() supported = f.readlines()
f.close() f.close()
else: else:
@ -218,7 +218,7 @@ python package_do_split_gconvs () {
supported.append(dbase[0] + d2) supported.append(dbase[0] + d2)
# Collate the locales by base and encoding # Collate the locales by base and encoding
utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', 1) or 0) utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
encodings = {} encodings = {}
for l in supported: for l in supported:
l = l[:-1] l = l[:-1]
@ -233,12 +233,12 @@ python package_do_split_gconvs () {
encodings[locale].append(charset) encodings[locale].append(charset)
def output_locale_source(name, pkgname, locale, encoding): def output_locale_source(name, pkgname, locale, encoding):
bb.data.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \ d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
(bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)), d) (bpn, legitimize_package_name(locale), bpn, legitimize_package_name(encoding)))
bb.data.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', 1) \ d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
% (locale, encoding, locale), d) % (locale, encoding, locale))
bb.data.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', 1) % \ d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
(locale, encoding, locale), d) (locale, encoding, locale))
def output_locale_binary_rdepends(name, pkgname, locale, encoding): def output_locale_binary_rdepends(name, pkgname, locale, encoding):
m = re.match("(.*)\.(.*)", name) m = re.match("(.*)\.(.*)", name)
@ -246,23 +246,23 @@ python package_do_split_gconvs () {
libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-","")) libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
else: else:
libc_name = name libc_name = name
bb.data.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
% (bpn, libc_name)), d) % (bpn, libc_name)))
rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split() rprovides = (d.getVar('RPROVIDES_%s' % pkgname, True) or "").split()
rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name))) rprovides.append(legitimize_package_name('%s-binary-localedata-%s' % (bpn, libc_name)))
bb.data.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides), d) d.setVar('RPROVIDES_%s' % pkgname, " ".join(rprovides))
commands = {} commands = {}
def output_locale_binary(name, pkgname, locale, encoding): def output_locale_binary(name, pkgname, locale, encoding):
treedir = base_path_join(d.getVar("WORKDIR", 1), "locale-tree") treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
ldlibdir = base_path_join(treedir, d.getVar("base_libdir", 1)) ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
path = d.getVar("PATH", 1) path = d.getVar("PATH", True)
i18npath = base_path_join(treedir, datadir, "i18n") i18npath = base_path_join(treedir, datadir, "i18n")
gconvpath = base_path_join(treedir, "iconvdata") gconvpath = base_path_join(treedir, "iconvdata")
outputpath = base_path_join(treedir, libdir, "locale") outputpath = base_path_join(treedir, libdir, "locale")
use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", 1) or "0" use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
if use_cross_localedef == "1": if use_cross_localedef == "1":
target_arch = d.getVar('TARGET_ARCH', True) target_arch = d.getVar('TARGET_ARCH', True)
locale_arch_options = { \ locale_arch_options = { \
@ -292,9 +292,9 @@ python package_do_split_gconvs () {
--inputfile=%s/i18n/locales/%s --charmap=%s %s" \ --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
% (treedir, datadir, locale, encoding, name) % (treedir, datadir, locale, encoding, name)
qemu_options = bb.data.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', 1), d, 1) qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
if not qemu_options: if not qemu_options:
qemu_options = d.getVar('QEMU_OPTIONS', 1) qemu_options = d.getVar('QEMU_OPTIONS', True)
cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
@ -307,7 +307,7 @@ python package_do_split_gconvs () {
def output_locale(name, locale, encoding): def output_locale(name, locale, encoding):
pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name) pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
bb.data.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', 1)), d) d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
rprovides = ' virtual-locale-%s' % legitimize_package_name(name) rprovides = ' virtual-locale-%s' % legitimize_package_name(name)
m = re.match("(.*)_(.*)", name) m = re.match("(.*)_(.*)", name)
if m: if m:
@ -347,7 +347,7 @@ python package_do_split_gconvs () {
bb.note(" " + " ".join(non_utf8)) bb.note(" " + " ".join(non_utf8))
if use_bin == "compile": if use_bin == "compile":
makefile = base_path_join(d.getVar("WORKDIR", 1), "locale-tree", "Makefile") makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
m = open(makefile, "w") m = open(makefile, "w")
m.write("all: %s\n\n" % " ".join(commands.keys())) m.write("all: %s\n\n" % " ".join(commands.keys()))
for cmd in commands: for cmd in commands:

View File

@ -121,7 +121,7 @@ python native_virtclass_handler () {
newdeps.append(dep + "-native") newdeps.append(dep + "-native")
else: else:
newdeps.append(dep) newdeps.append(dep)
bb.data.setVar(varname, " ".join(newdeps), d) d.setVar(varname, " ".join(newdeps))
map_dependencies("DEPENDS", e.data) map_dependencies("DEPENDS", e.data)
for pkg in (e.data.getVar("PACKAGES", True).split() + [""]): for pkg in (e.data.getVar("PACKAGES", True).split() + [""]):
@ -139,7 +139,7 @@ python native_virtclass_handler () {
provides = provides.replace(prov, prov + "-native") provides = provides.replace(prov, prov + "-native")
e.data.setVar("PROVIDES", provides) e.data.setVar("PROVIDES", provides)
bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native", e.data) e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native")
} }
addhandler native_virtclass_handler addhandler native_virtclass_handler

View File

@ -15,7 +15,7 @@ python () {
sdkarchs = [] sdkarchs = []
for arch in archs: for arch in archs:
sdkarchs.append(arch + '-nativesdk') sdkarchs.append(arch + '-nativesdk')
bb.data.setVar('PACKAGE_ARCHS', " ".join(sdkarchs), d) d.setVar('PACKAGE_ARCHS', " ".join(sdkarchs))
} }
STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}" STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}"
@ -66,7 +66,7 @@ python nativesdk_virtclass_handler () {
if not pn.endswith("-nativesdk"): if not pn.endswith("-nativesdk"):
return return
bb.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk", e.data) e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk")
} }
python () { python () {
@ -91,7 +91,7 @@ python () {
newdeps.append(dep.replace("-nativesdk", "") + "-nativesdk") newdeps.append(dep.replace("-nativesdk", "") + "-nativesdk")
else: else:
newdeps.append(dep) newdeps.append(dep)
bb.data.setVar(varname, " ".join(newdeps), d) d.setVar(varname, " ".join(newdeps))
map_dependencies("DEPENDS", d) map_dependencies("DEPENDS", d)
#for pkg in (d.getVar("PACKAGES", True).split() + [""]): #for pkg in (d.getVar("PACKAGES", True).split() + [""]):

View File

@ -151,7 +151,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
the_files.append(fp % m.group(1)) the_files.append(fp % m.group(1))
else: else:
the_files.append(aux_files_pattern_verbatim % m.group(1)) the_files.append(aux_files_pattern_verbatim % m.group(1))
bb.data.setVar('FILES_' + pkg, " ".join(the_files), d) d.setVar('FILES_' + pkg, " ".join(the_files))
if extra_depends != '': if extra_depends != '':
the_depends = d.getVar('RDEPENDS_' + pkg, True) the_depends = d.getVar('RDEPENDS_' + pkg, True)
if the_depends: if the_depends:
@ -165,11 +165,11 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
if postrm: if postrm:
d.setVar('pkg_postrm_' + pkg, postrm) d.setVar('pkg_postrm_' + pkg, postrm)
else: else:
bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d) d.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o))
if callable(hook): if callable(hook):
hook(f, pkg, file_regex, output_pattern, m.group(1)) hook(f, pkg, file_regex, output_pattern, m.group(1))
bb.data.setVar('PACKAGES', ' '.join(packages), d) d.setVar('PACKAGES', ' '.join(packages))
PACKAGE_DEPENDS += "file-native" PACKAGE_DEPENDS += "file-native"
@ -183,7 +183,7 @@ python () {
deps = (d.getVarFlag('do_package', 'deptask') or "").split() deps = (d.getVarFlag('do_package', 'deptask') or "").split()
# shlibs requires any DEPENDS to have already packaged for the *.list files # shlibs requires any DEPENDS to have already packaged for the *.list files
deps.append("do_package") deps.append("do_package")
bb.data.setVarFlag('do_package', 'deptask', " ".join(deps), d) d.setVarFlag('do_package', 'deptask', " ".join(deps))
elif not bb.data.inherits_class('image', d): elif not bb.data.inherits_class('image', d):
d.setVar("PACKAGERDEPTASK", "") d.setVar("PACKAGERDEPTASK", "")
} }
@ -202,7 +202,7 @@ def splitfile(file, debugfile, debugsrcdir, d):
pathprefix = "export PATH=%s; " % d.getVar('PATH', True) pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
objcopy = d.getVar("OBJCOPY", True) objcopy = d.getVar("OBJCOPY", True)
debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
workdir = bb.data.expand("${WORKDIR}", d) workdir = d.getVar("WORKDIR", True)
workparentdir = os.path.dirname(workdir) workparentdir = os.path.dirname(workdir)
sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d) sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
@ -245,7 +245,7 @@ def splitfile2(debugsrcdir, d):
strip = d.getVar("STRIP", True) strip = d.getVar("STRIP", True)
objcopy = d.getVar("OBJCOPY", True) objcopy = d.getVar("OBJCOPY", True)
debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d) debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
workdir = bb.data.expand("${WORKDIR}", d) workdir = d.getVar("WORKDIR", True)
workparentdir = os.path.dirname(workdir) workparentdir = os.path.dirname(workdir)
workbasedir = os.path.basename(workdir) workbasedir = os.path.basename(workdir)
sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d) sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
@ -341,7 +341,7 @@ def runtime_mapping_rename (varname, d):
else: else:
new_depends.append(new_depend) new_depends.append(new_depend)
bb.data.setVar(varname, " ".join(new_depends) or None, d) d.setVar(varname, " ".join(new_depends) or None)
#bb.note("%s after: %s" % (varname, d.getVar(varname, True))) #bb.note("%s after: %s" % (varname, d.getVar(varname, True)))
@ -399,15 +399,15 @@ python package_do_split_locales() {
ln = legitimize_package_name(l) ln = legitimize_package_name(l)
pkg = pn + '-locale-' + ln pkg = pn + '-locale-' + ln
packages.append(pkg) packages.append(pkg)
bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d) d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
bb.data.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln), d) d.setVar('RDEPENDS_' + pkg, '%s virtual-locale-%s' % (mainpkg, ln))
bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d) d.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln))
bb.data.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l), d) d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
bb.data.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l), d) d.setVar('DESCRIPTION_' + pkg, '%s This package contains language translation files for the %s locale.' % (description, l))
if locale_section: if locale_section:
d.setVar('SECTION_' + pkg, locale_section) d.setVar('SECTION_' + pkg, locale_section)
bb.data.setVar('PACKAGES', ' '.join(packages), d) d.setVar('PACKAGES', ' '.join(packages))
# Disabled by RP 18/06/07 # Disabled by RP 18/06/07
# Wildcards aren't supported in debian # Wildcards aren't supported in debian
@ -417,7 +417,7 @@ python package_do_split_locales() {
# Probably breaks since virtual-locale- isn't provided anywhere # Probably breaks since virtual-locale- isn't provided anywhere
#rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split() #rdep = (d.getVar('RDEPENDS_%s' % mainpkg, True) or d.getVar('RDEPENDS', True) or "").split()
#rdep.append('%s-locale*' % pn) #rdep.append('%s-locale*' % pn)
#bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d) #d.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep))
} }
python perform_packagecopy () { python perform_packagecopy () {
@ -1018,7 +1018,7 @@ python populate_packages () {
break break
if found == False: if found == False:
bb.note("%s contains dangling symlink to %s" % (pkg, l)) bb.note("%s contains dangling symlink to %s" % (pkg, l))
bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d) d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
} }
populate_packages[dirs] = "${D}" populate_packages[dirs] = "${D}"
@ -1033,11 +1033,11 @@ python emit_pkgdata() {
c = codecs.getencoder("string_escape") c = codecs.getencoder("string_escape")
return c(str)[0] return c(str)[0]
val = bb.data.getVar('%s_%s' % (var, pkg), d, True) val = d.getVar('%s_%s' % (var, pkg), True)
if val: if val:
f.write('%s_%s: %s\n' % (var, pkg, encode(val))) f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
return return
val = bb.data.getVar('%s' % (var), d, True) val = d.getVar('%s' % (var), True)
if val: if val:
f.write('%s: %s\n' % (var, encode(val))) f.write('%s: %s\n' % (var, encode(val)))
return return
@ -1159,12 +1159,12 @@ python package_do_filedeps() {
if len(provides) > 0: if len(provides) > 0:
provides_files.append(file) provides_files.append(file)
key = "FILERPROVIDES_" + file + "_" + pkg key = "FILERPROVIDES_" + file + "_" + pkg
bb.data.setVar(key, " ".join(provides), d) d.setVar(key, " ".join(provides))
if len(requires) > 0: if len(requires) > 0:
requires_files.append(file) requires_files.append(file)
key = "FILERDEPENDS_" + file + "_" + pkg key = "FILERDEPENDS_" + file + "_" + pkg
bb.data.setVar(key, " ".join(requires), d) d.setVar(key, " ".join(requires))
# Determine dependencies # Determine dependencies
for pkg in packages.split(): for pkg in packages.split():
@ -1181,8 +1181,8 @@ python package_do_filedeps() {
process_deps(dep_pipe, pkg, f, provides_files, requires_files) process_deps(dep_pipe, pkg, f, provides_files, requires_files)
bb.data.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files), d) d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files))
bb.data.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files), d) d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files))
} }
SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs" SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
@ -1461,7 +1461,7 @@ python package_do_pkgconfig () {
if m: if m:
name = m.group(1) name = m.group(1)
val = m.group(2) val = m.group(2)
bb.data.setVar(name, bb.data.expand(val, pd), pd) pd.setVar(name, bb.data.expand(val, pd))
continue continue
m = field_re.match(l) m = field_re.match(l)
if m: if m:
@ -1519,7 +1519,7 @@ python package_do_pkgconfig () {
python read_shlibdeps () { python read_shlibdeps () {
packages = d.getVar('PACKAGES', True).split() packages = d.getVar('PACKAGES', True).split()
for pkg in packages: for pkg in packages:
rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, 0) or d.getVar('RDEPENDS', 0) or "") rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d) depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
@ -1529,7 +1529,7 @@ python read_shlibdeps () {
fd.close() fd.close()
for l in lines: for l in lines:
rdepends[l.rstrip()] = "" rdepends[l.rstrip()] = ""
bb.data.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False), d) d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))
} }
python package_depchains() { python package_depchains() {
@ -1569,7 +1569,7 @@ python package_depchains() {
rreclist[pkgname] = "" rreclist[pkgname] = ""
#bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d) d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):
@ -1590,7 +1590,7 @@ python package_depchains() {
rreclist[pkgname] = "" rreclist[pkgname] = ""
#bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist))) #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
bb.data.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False), d) d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))
def add_dep(list, dep): def add_dep(list, dep):
dep = dep.split(' (')[0].strip() dep = dep.split(' (')[0].strip()

View File

@ -11,7 +11,7 @@ DPKG_ARCH ?= "${TARGET_ARCH}"
PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs"
python package_deb_fn () { python package_deb_fn () {
bb.data.setVar('PKGFN', d.getVar('PKG'), d) d.setVar('PKGFN', d.getVar('PKG'))
} }
addtask package_deb_install addtask package_deb_install
@ -409,7 +409,7 @@ python () {
deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split() deps = (d.getVarFlag('do_package_write_deb', 'depends') or "").split()
deps.append('dpkg-native:do_populate_sysroot') deps.append('dpkg-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_deb', 'depends', " ".join(deps), d) d.setVarFlag('do_package_write_deb', 'depends', " ".join(deps))
d.setVarFlag('do_package_write_deb', 'fakeroot', "1") d.setVarFlag('do_package_write_deb', 'fakeroot', "1")
d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1") d.setVarFlag('do_package_write_deb_setscene', 'fakeroot', "1")

View File

@ -11,7 +11,7 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks"
OPKGBUILDCMD ??= "opkg-build" OPKGBUILDCMD ??= "opkg-build"
python package_ipk_fn () { python package_ipk_fn () {
bb.data.setVar('PKGFN', d.getVar('PKG'), d) d.setVar('PKGFN', d.getVar('PKG'))
} }
python package_ipk_install () { python package_ipk_install () {
@ -441,7 +441,7 @@ python () {
deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split() deps = (d.getVarFlag('do_package_write_ipk', 'depends') or "").split()
deps.append('opkg-utils-native:do_populate_sysroot') deps.append('opkg-utils-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps), d) d.setVarFlag('do_package_write_ipk', 'depends', " ".join(deps))
d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1") d.setVarFlag('do_package_write_ipk_setscene', 'fakeroot', "1")
} }

View File

@ -8,7 +8,7 @@ RPMBUILD="rpmbuild"
PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms"
python package_rpm_fn () { python package_rpm_fn () {
bb.data.setVar('PKGFN', d.getVar('PKG'), d) d.setVar('PKGFN', d.getVar('PKG'))
} }
python package_rpm_install () { python package_rpm_install () {
@ -467,7 +467,7 @@ python write_specfile () {
ver = ver.replace(pv, reppv) ver = ver.replace(pv, reppv)
newdeps_dict[dep] = ver newdeps_dict[dep] = ver
depends = bb.utils.join_deps(newdeps_dict) depends = bb.utils.join_deps(newdeps_dict)
bb.data.setVar(varname, depends.strip(), d) d.setVar(varname, depends.strip())
# We need to change the style the dependency from BB to RPM # We need to change the style the dependency from BB to RPM
# This needs to happen AFTER the mapping_rename_hook # This needs to happen AFTER the mapping_rename_hook
@ -969,7 +969,7 @@ python () {
deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split() deps = (d.getVarFlag('do_package_write_rpm', 'depends') or "").split()
deps.append('rpm-native:do_populate_sysroot') deps.append('rpm-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps), d) d.setVarFlag('do_package_write_rpm', 'depends', " ".join(deps))
d.setVarFlag('do_package_write_rpm', 'fakeroot', 1) d.setVarFlag('do_package_write_rpm', 'fakeroot', 1)
d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1) d.setVarFlag('do_package_write_rpm_setscene', 'fakeroot', 1)
} }

View File

@ -69,7 +69,7 @@ python do_package_tar () {
if not overrides: if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined') raise bb.build.FuncFailed('OVERRIDES not defined')
overrides = bb.data.expand(overrides, localdata) overrides = bb.data.expand(overrides, localdata)
bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata) localdata.setVar('OVERRIDES', '%s:%s' % (overrides, pkg))
bb.data.update_data(localdata) bb.data.update_data(localdata)
@ -95,7 +95,7 @@ python () {
deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split() deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split()
deps.append('tar-native:do_populate_sysroot') deps.append('tar-native:do_populate_sysroot')
deps.append('virtual/fakeroot-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot')
bb.data.setVarFlag('do_package_write_tar', 'depends', " ".join(deps), d) d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps))
d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") d.setVarFlag('do_package_write_ipk', 'fakeroot', "1")
} }

View File

@ -29,13 +29,13 @@ python do_distribute_sources () {
if url.basename == '*': if url.basename == '*':
import os.path import os.path
dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath))) dest_dir = os.path.basename(os.path.dirname(os.path.abspath(url.localpath)))
bb.data.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir), d) d.setVar('DEST', "%s_%s/" % (d.getVar('PF', 1), dest_dir))
else: else:
bb.data.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename), d) d.setVar('DEST', "%s_%s" % (d.getVar('PF', 1), url.basename))
else: else:
d.setVar('DEST', '') d.setVar('DEST', '')
bb.data.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license), d) d.setVar('SRC_DISTRIBUTEDIR', "%s/%s" % (sources_dir, license))
bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d) bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)
} }

View File

@ -20,18 +20,18 @@ SSTATEPOSTINSTFUNCS ?= ""
python () { python () {
if bb.data.inherits_class('native', d): if bb.data.inherits_class('native', d):
bb.data.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'), d) d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'))
elif bb.data.inherits_class('cross', d): elif bb.data.inherits_class('cross', d):
bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d), d) d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d))
bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d), d) d.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d))
elif bb.data.inherits_class('crosssdk', d): elif bb.data.inherits_class('crosssdk', d):
bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d), d) d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d))
elif bb.data.inherits_class('nativesdk', d): elif bb.data.inherits_class('nativesdk', d):
bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d), d) d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d))
elif bb.data.inherits_class('cross-canadian', d): elif bb.data.inherits_class('cross-canadian', d):
bb.data.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d), d) d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d))
else: else:
bb.data.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d), d) d.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d))
# These classes encode staging paths into their scripts data so can only be # These classes encode staging paths into their scripts data so can only be
# reused if we manipulate the paths # reused if we manipulate the paths

View File

@ -22,6 +22,6 @@ python () {
for pkg in packages: for pkg in packages:
for postfix in ['-dbg', '-dev']: for postfix in ['-dbg', '-dev']:
genpackages.append(pkg+postfix) genpackages.append(pkg+postfix)
bb.data.setVar('PACKAGES', ' '.join(packages+genpackages), d) d.setVar('PACKAGES', ' '.join(packages+genpackages))
} }

View File

@ -45,7 +45,7 @@ python populate_packages_prepend () {
bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
overrides = localdata.getVar("OVERRIDES", 1) overrides = localdata.getVar("OVERRIDES", 1)
bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata) localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
bb.data.update_data(localdata) bb.data.update_data(localdata)
""" """

View File

@ -285,7 +285,7 @@ DOTDEBUG-dbg = "${bindir}/.debug ${sbindir}/.debug ${libexecdir}/.debug ${libdir
DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug" DEBUGFILEDIRECTORY-dbg = "/usr/lib/debug /usr/src/debug"
FILES_${PN}-dbg = "${@bb.data.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', 1) == 'debug-file-directory'], d, 1)}" FILES_${PN}-dbg = "${@d.getVar(['DOTDEBUG-dbg', 'DEBUGFILEDIRECTORY-dbg'][d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory'], True)}"
SECTION_${PN}-dbg = "devel" SECTION_${PN}-dbg = "devel"
ALLOW_EMPTY_${PN}-dbg = "1" ALLOW_EMPTY_${PN}-dbg = "1"
@ -502,7 +502,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types"
# Disabled until the option works properly -feliminate-dwarf2-dups # Disabled until the option works properly -feliminate-dwarf2-dups
FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}" FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}"
DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe" DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe"
SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', 1) == '1'], d, 1)}" SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', True) == '1'], True)}"
SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION" SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION"
BUILD_OPTIMIZATION = "-O2 -pipe" BUILD_OPTIMIZATION = "-O2 -pipe"

View File

@ -284,19 +284,19 @@ def compare_in_distro_packages_list(distro_check_dir, d):
if pn.find("-native") != -1: if pn.find("-native") != -1:
pnstripped = pn.split("-native") pnstripped = pn.split("-native")
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
recipe_name = pnstripped[0] recipe_name = pnstripped[0]
if pn.find("-cross") != -1: if pn.find("-cross") != -1:
pnstripped = pn.split("-cross") pnstripped = pn.split("-cross")
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
recipe_name = pnstripped[0] recipe_name = pnstripped[0]
if pn.find("-initial") != -1: if pn.find("-initial") != -1:
pnstripped = pn.split("-initial") pnstripped = pn.split("-initial")
bb.data.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True), localdata) localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata) bb.data.update_data(localdata)
recipe_name = pnstripped[0] recipe_name = pnstripped[0]

View File

@ -81,17 +81,15 @@ python () {
if "${OE_DEL}": if "${OE_DEL}":
d.setVar('configmangle_append', "${OE_DEL}" + "\n") d.setVar('configmangle_append', "${OE_DEL}" + "\n")
if "${OE_FEATURES}": if "${OE_FEATURES}":
bb.data.setVar('configmangle_append', d.setVar('configmangle_append',
"/^### DISTRO FEATURES$/a\\\n%s\n\n" % "/^### DISTRO FEATURES$/a\\\n%s\n\n" %
("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))), ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))))
d) d.setVar('configmangle_append',
bb.data.setVar('configmangle_append',
"/^### CROSS$/a\\\n%s\n" % "/^### CROSS$/a\\\n%s\n" %
("\\n".join(["CONFIG_CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"", ("\\n".join(["CONFIG_CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"",
"CONFIG_EXTRA_CFLAGS=\"${CFLAGS}\"" "CONFIG_EXTRA_CFLAGS=\"${CFLAGS}\""
]) ])
), ))
d)
} }
do_prepare_config () { do_prepare_config () {

View File

@ -80,11 +80,11 @@ RDEPENDS_task-core-sdk = "\
# rreclist.append('%s-dev' % name) # rreclist.append('%s-dev' % name)
# #
# oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or '' # oldrrec = d.getVar('RRECOMMENDS_%s' % newpkg) or ''
# bb.data.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist), d) # d.setVar('RRECOMMENDS_%s' % newpkg, oldrrec + ' ' + ' '.join(rreclist))
# # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg))) # # bb.note('RRECOMMENDS_%s = "%s"' % (newpkg, d.getVar('RRECOMMENDS_%s' % newpkg)))
# #
# # bb.note('pkgs is %s' % pkgs) # # bb.note('pkgs is %s' % pkgs)
# bb.data.setVar('PACKAGES', ' '.join(pkgs), d) # d.setVar('PACKAGES', ' '.join(pkgs))
#} #}
# #
#PACKAGES_DYNAMIC = "task-core-sdk-*" #PACKAGES_DYNAMIC = "task-core-sdk-*"

View File

@ -141,11 +141,10 @@ python () {
if "${OE_DEL}": if "${OE_DEL}":
d.setVar('configmangle_append', "${OE_DEL}" + "\n") d.setVar('configmangle_append', "${OE_DEL}" + "\n")
if "${OE_FEATURES}": if "${OE_FEATURES}":
bb.data.setVar('configmangle_append', d.setVar('configmangle_append',
"/^### DISTRO FEATURES$/a\\\n%s\n\n" % "/^### DISTRO FEATURES$/a\\\n%s\n\n" %
("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))), ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))))
d) d.setVar('configmangle_append',
bb.data.setVar('configmangle_append',
"/^### CROSS$/a\\\n%s\n" % "/^### CROSS$/a\\\n%s\n" %
("\\n".join(["CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"", ("\\n".join(["CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"",
"UCLIBC_EXTRA_CFLAGS=\"${UCLIBC_EXTRA_CFLAGS}\"", "UCLIBC_EXTRA_CFLAGS=\"${UCLIBC_EXTRA_CFLAGS}\"",
@ -154,22 +153,18 @@ python () {
"DEVEL_PREFIX=\"/${prefix}\"", "DEVEL_PREFIX=\"/${prefix}\"",
"SHARED_LIB_LOADER_PREFIX=\"/lib\"", "SHARED_LIB_LOADER_PREFIX=\"/lib\"",
]) ])
), ))
d) d.setVar('configmangle_append',
bb.data.setVar('configmangle_append',
"/^### TGT$/a\\\nTARGET_ARCH=\"%s\"\\nTARGET_%s=y\n" % "/^### TGT$/a\\\nTARGET_ARCH=\"%s\"\\nTARGET_%s=y\n" %
("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"), ("${UCLIBC_ARCH}", "${UCLIBC_ARCH}"))
d) d.setVar('configmangle_append',
bb.data.setVar('configmangle_append', "/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', True) in [ 'soft' ]]))
"/^### FPU$/a\\\n%s\n\n" % (["UCLIBC_HAS_FPU=y","# UCLIBC_HAS_FPU is not set"][d.getVar('TARGET_FPU', 1) in [ 'soft' ]]), d)
if "${UCLIBC_ENDIAN}": if "${UCLIBC_ENDIAN}":
bb.data.setVar('configmangle_append', d.setVar('configmangle_append',
"/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"), "/^### ABI$/a\\\nARCH_WANTS_%s_ENDIAN=y\n\n" % ("${UCLIBC_ENDIAN}"))
d)
if "${UCLIBC_ABI}": if "${UCLIBC_ABI}":
bb.data.setVar('configmangle_append', d.setVar('configmangle_append',
"/^### ABI$/a\\\nCONFIG_%s=y\n\n" % ("${UCLIBC_ABI}"), "/^### ABI$/a\\\nCONFIG_%s=y\n\n" % ("${UCLIBC_ABI}"))
d)
} }
do_patch_append() { do_patch_append() {

View File

@ -21,7 +21,7 @@ SRC_URI[sha256sum] = "7fe62180f08ef5f0a0062fb444591e349cae2ab5af6ad834599f5c654e
DEPENDS += "bigreqsproto xproto xextproto xtrans libxau xcmiscproto \ DEPENDS += "bigreqsproto xproto xextproto xtrans libxau xcmiscproto \
libxdmcp xf86bigfontproto kbproto inputproto xproto-native" libxdmcp xf86bigfontproto kbproto inputproto xproto-native"
FILESDIR = "${@os.path.dirname(bb.data.getVar('FILE',d,1))}/libx11" FILESDIR = "${@os.path.dirname(d.getVar('FILE', True))}/libx11"
EXTRA_OECONF += "--without-xcb --disable-udc --disable-xcms --disable-xlocale --with-keysymdefdir=${STAGING_INCDIR}/X11" EXTRA_OECONF += "--without-xcb --disable-udc --disable-xcms --disable-xlocale --with-keysymdefdir=${STAGING_INCDIR}/X11"
CFLAGS += "-D_GNU_SOURCE" CFLAGS += "-D_GNU_SOURCE"

View File

@ -2,8 +2,8 @@ LIBV = "0.10"
python populate_packages_prepend () { python populate_packages_prepend () {
gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d) gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d)
postinst = d.getVar('plugin_postinst', 1) postinst = d.getVar('plugin_postinst', True)
glibdir = bb.data.expand('${libdir}', d) glibdir = d.getVar('libdir', True)
do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True) do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d)) do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d))
@ -19,7 +19,7 @@ python populate_packages_prepend () {
for pkg in packages[1:]: for pkg in packages[1:]:
if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'): if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-static'):
metapkg_rdepends.append(pkg) metapkg_rdepends.append(pkg)
bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d) d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package') d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package')
} }

View File

@ -43,14 +43,14 @@ python __anonymous () {
pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
# NOTE: the headers for QtAssistantClient are different # NOTE: the headers for QtAssistantClient are different
incname = name.replace("QtAssistantClient", "QtAssistant") incname = name.replace("QtAssistantClient", "QtAssistant")
bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals(), d) d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s${QT_LIBINFIX}.so.*" % locals())
bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s${QT_LIBINFIX}.prl d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s${QT_LIBINFIX}.prl
${libdir}/lib%(name)s${QT_LIBINFIX}.a ${libdir}/lib%(name)s${QT_LIBINFIX}.a
${libdir}/lib%(name)s${QT_LIBINFIX}.la ${libdir}/lib%(name)s${QT_LIBINFIX}.la
${libdir}/lib%(name)s${QT_LIBINFIX}.so ${libdir}/lib%(name)s${QT_LIBINFIX}.so
${includedir}/${QT_DIR_NAME}/%(incname)s ${includedir}/${QT_DIR_NAME}/%(incname)s
${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals(), d) ${libdir}/pkgconfig/%(name)s${QT_LIBINFIX}.pc""" % locals())
bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals(), d) d.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s${QT_LIBINFIX}.so*" % locals())
d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg")
lib_packages.append(pkg) lib_packages.append(pkg)
dev_packages.append("%s-dev" % pkg) dev_packages.append("%s-dev" % pkg)
@ -60,22 +60,22 @@ python __anonymous () {
for name in d.getVar("QT_EXTRA_LIBS", 1).split(): for name in d.getVar("QT_EXTRA_LIBS", 1).split():
pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4" pkg = d.getVar("QT_BASE_LIB", True) + name.lower().replace("qt", "").replace("_", "-") + "4"
bb.data.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals(), d) d.setVar("FILES_%s" % pkg, "${libdir}/lib%(name)s.so.*" % locals())
bb.data.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl d.setVar("FILES_%s-dev" % pkg, """${libdir}/lib%(name)s.prl
${libdir}/lib%(name)s.a ${libdir}/lib%(name)s.a
${libdir}/lib%(name)s.la ${libdir}/lib%(name)s.la
${libdir}/lib%(name)s.so ${libdir}/lib%(name)s.so
${includedir}/${QT_DIR_NAME}/%(incname)s ${includedir}/${QT_DIR_NAME}/%(incname)s
${libdir}/pkgconfig/%(name)s.pc""" % locals(), d) ${libdir}/pkgconfig/%(name)s.pc""" % locals())
bb.data.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals(), d) d.setVar("FILES_%s-dbg" % pkg, "${libdir}/.debug/lib%(name)s.so*" % locals())
d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg") d.setVar("RRECOMMENDS_%s-dbg" % pkg, "${PN}-dbg")
lib_packages.append(pkg) lib_packages.append(pkg)
dev_packages.append("%s-dev" % pkg) dev_packages.append("%s-dev" % pkg)
dbg_packages.append("%s-dbg" % pkg) dbg_packages.append("%s-dbg" % pkg)
bb.data.setVar("LIB_PACKAGES", " ".join(lib_packages), d) d.setVar("LIB_PACKAGES", " ".join(lib_packages))
bb.data.setVar("DEV_PACKAGES", " ".join(dev_packages), d) d.setVar("DEV_PACKAGES", " ".join(dev_packages))
bb.data.setVar("DBG_PACKAGES", " ".join(dbg_packages), d) d.setVar("DBG_PACKAGES", " ".join(dbg_packages))
} }
OTHER_PACKAGES = "\ OTHER_PACKAGES = "\
@ -261,7 +261,7 @@ python populate_packages_prepend() {
packages = "%s %s-dbg" % (packages, package) packages = "%s %s-dbg" % (packages, package)
file_name = os.path.join(plugin_dir_dbg, os.path.basename(file)) file_name = os.path.join(plugin_dir_dbg, os.path.basename(file))
d.setVar("FILES_%s-dbg" % package, file_name) d.setVar("FILES_%s-dbg" % package, file_name)
bb.data.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package), d) d.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package))
d.setVar('PACKAGES', packages) d.setVar('PACKAGES', packages)

View File

@ -57,8 +57,8 @@ python __anonymous () {
packages.append(pkg) packages.append(pkg)
if not d.getVar("FILES_%s" % pkg, 1): if not d.getVar("FILES_%s" % pkg, 1):
d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib) d.setVar("FILES_%s" % pkg, "${libdir}/libboost_%s*.so.*" % lib)
bb.data.setVar("BOOST_PACKAGES", " ".join(packages), d) d.setVar("BOOST_PACKAGES", " ".join(packages))
bb.data.setVar("BJAM_EXTRA", " ".join(extras), d) d.setVar("BJAM_EXTRA", " ".join(extras))
} }
# Override the contents of specific packages # Override the contents of specific packages