meta/scripts: Various getVar/getVarFlag expansion parameter fixes

There were a few straggling expansion parameter removals left for
getVar/getVarFlag where odd whitespace meant they were missed in
previous passes. There were also some plainly broken usages such
as:

d.getVar('ALTERNATIVE_TARGET', old_name, True)
path = d.getVar('PATH', d, True)
d.getVar('IMAGE_ROOTFS', 'True')

which I've corrected (they happened to work by luck).
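
For reference, the corrected forms (visible in the hunks below) are plain
lookups, since expansion is now the default in d.getVar()/d.getVarFlag();
a minimal sketch, with the expand keyword shown only as the standard
opt-out:

d.getVar('ALTERNATIVE_TARGET')        # was: d.getVar('ALTERNATIVE_TARGET', old_name, True)
path = d.getVar('PATH')               # was: path = d.getVar('PATH', d, True)
d.getVar('IMAGE_ROOTFS')              # was: d.getVar('IMAGE_ROOTFS', 'True')
raw = d.getVar('PATH', expand=False)  # explicit opt-out of expansion, if needed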

(From OE-Core rev: 688f7a64917a5ce5cbe12f8e5da4d47e265d240f)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Author: Richard Purdie, 2017-01-05 21:15:08 +00:00
Commit: 022a8b58c8 (parent: e016eb10b0)
29 changed files with 57 additions and 59 deletions


@@ -38,7 +38,7 @@ python blacklist_multilib_eventhandler() {
 }
 python () {
-    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN'), True)
+    blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN'))
     if blacklist:
         raise bb.parse.SkipPackage("Recipe is blacklisted: %s" % (blacklist))


@@ -117,7 +117,7 @@ python bugzilla_eventhandler() {
         compon = data.getVar("BUGZILLA_COMPONENT")
         version = data.getVar("BUGZILLA_VERSION")
-        proxy = data.getVar('http_proxy', True )
+        proxy = data.getVar('http_proxy')
         if (proxy):
             import urllib2
             s, u, p, hostport = urllib2._parse_proxy(proxy)
@@ -140,7 +140,7 @@ python bugzilla_eventhandler() {
                 "pv" : data.getVar("PV"),
               }
        log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task))
-       text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar( 'MACHINE', True ) )
+       text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar('MACHINE') )
        if len(log_file) != 0:
            print >> debug_file, "Adding log file %s" % log_file[0]
            file = open(log_file[0], 'r')


@@ -27,7 +27,7 @@ python do_menuconfig() {
         mtime = 0
     oe_terminal("${SHELL} -c \"make %s; if [ \$? -ne 0 ]; then echo 'Command failed.'; printf 'Press any key to continue... '; read r; fi\"" % d.getVar('KCONFIG_CONFIG_COMMAND'),
-                d.getVar('PN', True ) + ' Configuration', d)
+                d.getVar('PN') + ' Configuration', d)
     # FIXME this check can be removed when the minimum bitbake version has been bumped
     if hasattr(bb.build, 'write_taint'):


@@ -249,7 +249,7 @@ python compress_doc_updatealternatives () {
                 d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target)
             elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg):
                 d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target)
-            elif d.getVar('ALTERNATIVE_TARGET', old_name, True):
+            elif d.getVar('ALTERNATIVE_TARGET'):
                 d.setVar('ALTERNATIVE_TARGET', new_target)
             new_names.append(new_name)


@@ -60,8 +60,8 @@ efi_hddimg_populate() {
 }
 python build_efi_cfg() {
-    s = d.getVar("S", True)
-    labels = d.getVar('LABELS', True)
+    s = d.getVar("S")
+    labels = d.getVar('LABELS')
     if not labels:
         bb.debug(1, "LABELS not defined, nothing to do")
         return
@@ -70,7 +70,7 @@ python build_efi_cfg() {
         bb.debug(1, "No labels, nothing to do")
         return
-    cfile = d.getVar('GUMMIBOOT_CFG', True)
+    cfile = d.getVar('GUMMIBOOT_CFG')
     try:
         cfgfile = open(cfile, 'w')
     except OSError:
@@ -78,7 +78,7 @@ python build_efi_cfg() {
     cfgfile.write('# Automatically created by OE\n')
     cfgfile.write('default %s\n' % (labels.split()[0]))
-    timeout = d.getVar('GUMMIBOOT_TIMEOUT', True)
+    timeout = d.getVar('GUMMIBOOT_TIMEOUT')
     if timeout:
         cfgfile.write('timeout %s\n' % timeout)
     else:
@@ -88,7 +88,7 @@ python build_efi_cfg() {
     for label in labels.split():
         localdata = d.createCopy()
-        overrides = localdata.getVar('OVERRIDES', True)
+        overrides = localdata.getVar('OVERRIDES')
         if not overrides:
             bb.fatal('OVERRIDES not defined')
@@ -104,8 +104,8 @@ python build_efi_cfg() {
         entrycfg.write('title %s\n' % label)
         entrycfg.write('linux /vmlinuz\n')
-        append = localdata.getVar('APPEND', True)
-        initrd = localdata.getVar('INITRD', True)
+        append = localdata.getVar('APPEND')
+        initrd = localdata.getVar('INITRD')
         if initrd:
             entrycfg.write('initrd /initrd\n')


@@ -336,7 +336,7 @@ def package_qa_check_libdir(d):
     import re
     pkgdest = d.getVar('PKGDEST')
-    base_libdir = d.getVar("base_libdir",True) + os.sep
+    base_libdir = d.getVar("base_libdir") + os.sep
     libdir = d.getVar("libdir") + os.sep
     libexecdir = d.getVar("libexecdir") + os.sep
     exec_prefix = d.getVar("exec_prefix") + os.sep
@@ -568,7 +568,7 @@ def package_qa_check_desktop(path, name, d, elf, messages):
     Run all desktop files through desktop-file-validate.
     """
     if path.endswith(".desktop"):
-        desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'desktop-file-validate')
+        desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'),'desktop-file-validate')
         output = os.popen("%s %s" % (desktop_file_validate, path))
         # This only produces output on errors
         for l in output:


@@ -286,7 +286,7 @@ python do_kernel_configcheck() {
     # if KMETA isn't set globally by a recipe using this routine, we need to
     # set the default to 'meta'. Otherwise, kconf_check is not passed a valid
     # meta-series for processing
-    kmeta = d.getVar( "KMETA", True ) or "meta"
+    kmeta = d.getVar("KMETA") or "meta"
     if not os.path.exists(kmeta):
         kmeta = "." + kmeta
@@ -298,8 +298,8 @@ python do_kernel_configcheck() {
     cmd = d.expand("cd ${S}; kconf_check --report -o ${S}/%s/cfg/ ${B}/.config ${S} %s" % (kmeta,configs))
     ret, result = oe.utils.getstatusoutput("%s%s" % (pathprefix, cmd))
-    config_check_visibility = int(d.getVar( "KCONF_AUDIT_LEVEL", True ) or 0)
-    bsp_check_visibility = int(d.getVar( "KCONF_BSP_AUDIT_LEVEL", True ) or 0)
+    config_check_visibility = int(d.getVar("KCONF_AUDIT_LEVEL") or 0)
+    bsp_check_visibility = int(d.getVar("KCONF_BSP_AUDIT_LEVEL") or 0)
     # if config check visibility is non-zero, report dropped configuration values
     mismatch_file = d.expand("${S}/%s/cfg/mismatch.txt" % kmeta)


@@ -117,7 +117,7 @@ def write_license_files(d, license_manifest, pkg_dic):
     copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST')
     copy_lic_dirs = d.getVar('COPY_LIC_DIRS')
     if copy_lic_manifest == "1":
-        rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS', 'True'),
+        rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS'),
                                           'usr', 'share', 'common-licenses')
         bb.utils.mkdirhier(rootfs_license_dir)
         rootfs_license_manifest = os.path.join(rootfs_license_dir,
@@ -516,7 +516,7 @@ def canonical_license(d, license):
     """
     lic = d.getVarFlag('SPDXLICENSEMAP', license) or ""
     if not lic and license.endswith('+'):
-        lic = d.getVarFlag('SPDXLICENSEMAP', license.rstrip('+'), True)
+        lic = d.getVarFlag('SPDXLICENSEMAP', license.rstrip('+'))
         if lic:
             lic += '+'
     return lic or license


@@ -119,7 +119,7 @@ PACKAGEFUNCS_append = " do_package_qa_multilib"
 python do_package_qa_multilib() {
     def check_mlprefix(pkg, var, mlprefix):
-        values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var) or "")
+        values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg)) or d.getVar(var) or "")
         candidates = []
         for i in values:
             if i.startswith('virtual/'):


@@ -1286,11 +1286,11 @@ python emit_pkgdata() {
             c = codecs.getencoder("unicode_escape")
             return c(str)[0].decode("latin1")
-        val = d.getVar('%s_%s' % (var, pkg), True)
+        val = d.getVar('%s_%s' % (var, pkg))
         if val:
             f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
             return val
-        val = d.getVar('%s' % (var), True)
+        val = d.getVar('%s' % (var))
         if val:
             f.write('%s: %s\n' % (var, encode(val)))
         return val


@@ -84,7 +84,7 @@ def package_compare_impl(pkgtype, d):
     import oe.sstatesig
     pn = d.getVar('PN')
-    deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
+    deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper())
     prepath = deploydir + '-prediff/'
     # Find out PKGR values are


@@ -512,7 +512,7 @@ install_tools() {
     # (they get populated from sstate on installation)
     unfsd_path="${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/unfsd"
     if [ "${SDK_INCLUDE_TOOLCHAIN}" == "1" -a ! -e $unfsd_path ] ; then
-        binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR'))}
+        binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE'), d.getVar('TOPDIR'))}
         lnr ${SDK_OUTPUT}/${SDKPATH}/$binrelpath/unfsd $unfsd_path
     fi
     touch ${SDK_OUTPUT}/${SDKPATH}/.devtoolbase


@@ -52,7 +52,7 @@ def qemu_run_binary(data, rootfs_path, binary):
 # this dance). For others (e.g. arm) a -cpu option is not necessary, since the
 # qemu-arm default CPU supports all required architecture levels.
-QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH'), True) or ""}"
+QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH')) or ""}"
 QEMU_OPTIONS[vardeps] += "QEMU_EXTRAOPTIONS_${PACKAGE_ARCH}"
 QEMU_EXTRAOPTIONS_ppce500v2 = " -cpu e500v2"


@@ -199,7 +199,7 @@ def check_toolchain_tune_args(data, tune, multilib, errs):
 def check_toolchain_args_present(data, tune, multilib, tune_errors, which):
     args_set = (data.getVar("TUNE_%s" % which) or "").split()
-    args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune), True) or "").split()
+    args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune)) or "").split()
     args_missing = []
     # If no args are listed/required, we are done.
@@ -455,7 +455,7 @@ def check_gcc_march(sanity_data):
     message = ""
     # Check if -march not in BUILD_CFLAGS
-    if sanity_data.getVar("BUILD_CFLAGS",True).find("-march") < 0:
+    if sanity_data.getVar("BUILD_CFLAGS").find("-march") < 0:
         result = False
         # Construct a test file


@@ -69,7 +69,7 @@ python systemd_populate_packages() {
         return
     def get_package_var(d, var, pkg):
-        val = (d.getVar('%s_%s' % (var, pkg), True) or "").strip()
+        val = (d.getVar('%s_%s' % (var, pkg)) or "").strip()
         if val == "":
             val = (d.getVar(var) or "").strip()
         return val


@@ -82,7 +82,7 @@ def tinder_format_http_post(d,status,log):
     # we only need on build_status.pl but sending it
     # always does not hurt
     try:
-        f = open(d.getVar('TMPDIR',True)+'/tinder-machine.id', 'r')
+        f = open(d.getVar('TMPDIR')+'/tinder-machine.id', 'r')
         id = f.read()
         variables['machine_id'] = id
     except:
@@ -106,8 +106,8 @@ def tinder_build_start(d):
     # get the body and type
     content_type, body = tinder_format_http_post(d,None,None)
-    server = d.getVar('TINDER_HOST', True )
-    url = d.getVar('TINDER_URL', True )
+    server = d.getVar('TINDER_HOST')
+    url = d.getVar('TINDER_URL')
     selector = url + "/xml/build_start.pl"
@@ -163,16 +163,16 @@ def tinder_print_info(d):
     time = tinder_time_string()
     ops = os.uname()[0]
     version = os.uname()[2]
-    url = d.getVar( 'TINDER_URL' , True )
-    tree = d.getVar( 'TINDER_TREE', True )
-    branch = d.getVar( 'TINDER_BRANCH', True )
-    srcdate = d.getVar( 'SRCDATE', True )
-    machine = d.getVar( 'MACHINE', True )
-    distro = d.getVar( 'DISTRO', True )
-    bbfiles = d.getVar( 'BBFILES', True )
-    tarch = d.getVar( 'TARGET_ARCH', True )
-    fpu = d.getVar( 'TARGET_FPU', True )
-    oerev = d.getVar( 'OE_REVISION', True ) or "unknown"
+    url = d.getVar('TINDER_URL')
+    tree = d.getVar('TINDER_TREE')
+    branch = d.getVar('TINDER_BRANCH')
+    srcdate = d.getVar('SRCDATE')
+    machine = d.getVar('MACHINE')
+    distro = d.getVar('DISTRO')
+    bbfiles = d.getVar('BBFILES')
+    tarch = d.getVar('TARGET_ARCH')
+    fpu = d.getVar('TARGET_FPU')
+    oerev = d.getVar('OE_REVISION') or "unknown"
     # there is a bug with tipple quoted strings
     # i will work around but will fix the original
@@ -326,7 +326,7 @@ def tinder_do_tinder_report(event):
     status = 100
     # Check if we have a old status...
     try:
-        h = open(event.data.getVar('TMPDIR',True)+'/tinder-status', 'r')
+        h = open(event.data.getVar('TMPDIR')+'/tinder-status', 'r')
         status = int(h.read())
     except:
         pass


@@ -20,7 +20,7 @@ python uninative_event_fetchloader() {
     loader isn't already present.
     """
-    chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH"), True)
+    chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH"))
     if not chksum:
         bb.fatal("Uninative selected but not configured correctly, please set UNINATIVE_CHECKSUM[%s]" % d.getVar("BUILD_ARCH"))


@@ -307,7 +307,7 @@ hardlinkdir () {
 def check_app_exists(app, d):
     app = d.expand(app).strip()
-    path = d.getVar('PATH', d, True)
+    path = d.getVar('PATH')
     return bool(bb.utils.which(path, app))
 def explode_deps(s):


@@ -561,7 +561,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types ${DEBUG_PREFIX_MAP}"
 # Disabled until the option works properly -feliminate-dwarf2-dups
 FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}"
 DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe"
-SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD') == '1'], True)}"
+SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD') == '1'])}"
 SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION"
 BUILD_OPTIMIZATION = "-O2 -pipe"


@@ -1,5 +1,5 @@
-baselib = "${@d.getVar('BASE_LIB_tune-' + (d.getVar('DEFAULTTUNE') or 'INVALID'), True) or d.getVar('BASELIB')}"
+baselib = "${@d.getVar('BASE_LIB_tune-' + (d.getVar('DEFAULTTUNE') or 'INVALID')) or d.getVar('BASELIB')}"
 MULTILIB_VARIANTS = "${@extend_variants(d,'MULTILIBS','multilib')}"
 MULTILIB_SAVE_VARNAME = "DEFAULTTUNE TARGET_ARCH TARGET_SYS TARGET_VENDOR"


@@ -129,11 +129,9 @@ class RpmIndexer(Indexer):
                 if default_tune:
                     localdata.setVar("DEFAULTTUNE", default_tune)
                     bb.data.update_data(localdata)
-                package_archs[eext[1]] = localdata.getVar('PACKAGE_ARCHS',
-                                                          True).split()
+                package_archs[eext[1]] = localdata.getVar('PACKAGE_ARCHS').split()
                 package_archs[eext[1]].reverse()
-                target_os[eext[1]] = localdata.getVar("TARGET_OS",
-                                                      True).strip()
+                target_os[eext[1]] = localdata.getVar("TARGET_OS").strip()
         ml_prefix_list = collections.OrderedDict()
         for mlib in package_archs:


@@ -30,7 +30,7 @@ def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
         return falsevalue
 def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
-    result = bb.utils.vercmp_string(d.getVar(variable,True), checkvalue)
+    result = bb.utils.vercmp_string(d.getVar(variable), checkvalue)
     if result <= 0:
         return truevalue
     else:


@@ -99,7 +99,7 @@ class PtestRunnerTest(oeRuntimeTest):
         return complementary_pkgs.split()
     def setUpLocal(self):
-        self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME'))
+        self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR"), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME'))
     @skipUnlessPassed('test_ssh')
     def test_ptestrunner(self):


@@ -26,7 +26,7 @@ def glibc_dl_info(d):
         ld_append_if_tune_exists(d, infos, ld_info_all)
     #DEFAULTTUNE_MULTILIB_ORIGINAL
-    original_tune=d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL",True)
+    original_tune=d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
     if original_tune:
         localdata = bb.data.createCopy(d)
         localdata.setVar("DEFAULTTUNE", original_tune)


@@ -6,7 +6,7 @@ PN = "meta-environment-extsdk-${MACHINE}"
 create_sdk_files_append() {
     local sysroot=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_TARGET'), d.getVar('TOPDIR'))}
-    local sdkpathnative=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_NATIVE',True), d.getVar('TOPDIR'))}
+    local sdkpathnative=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_NATIVE'), d.getVar('TOPDIR'))}
     toolchain_create_sdk_env_script '' '' $sysroot '' ${bindir_native} ${prefix_native} $sdkpathnative
 }


@@ -33,7 +33,7 @@ LICENSE = "GPLv2"
 LIC_FILES_CHKSUM = "file://COPYING;md5=d7810fab7487fb0aad327b76f1be7cd7"
 python __anonymous () {
-    major = d.getVar("PV",True).split('.')[0]
+    major = d.getVar("PV").split('.')[0]
     if major == "3":
         d.setVar("HEADER_FETCH_VER", "3.0")
     elif major == "4":


@@ -21,8 +21,8 @@ do_install_append_class-native () {
     if test "${libdir}" = "${base_libdir}" ; then
         return
     fi
-    librelpath=${@os.path.relpath(d.getVar('libdir',True), d.getVar('base_libdir'))}
-    baselibrelpath=${@os.path.relpath(d.getVar('base_libdir',True), d.getVar('libdir'))}
+    librelpath=${@os.path.relpath(d.getVar('libdir'), d.getVar('base_libdir'))}
+    baselibrelpath=${@os.path.relpath(d.getVar('base_libdir'), d.getVar('libdir'))}
     # Remove bad symlinks & create the correct symlinks
     if test -L ${D}${libdir}/lib${BPN}.so ; then


@@ -34,7 +34,7 @@ def package(args, config, basepath, workspace):
         if not image_pkgtype:
             image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')
-        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper(), True)
+        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
     finally:
         tinfoil.shutdown()


@@ -117,7 +117,7 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
     brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename
     srctree = os.path.abspath(srctree)
-    pn = d.getVar('PN',True)
+    pn = d.getVar('PN')
     af = os.path.join(appendpath, '%s.bbappend' % brf)
     with open(af, 'w') as f:
         f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')