classes/lib: Add expand parameter to getVarFlag
This sets the scene for removing the default False for expansion from getVarFlag. This would later allow True to become the default. For the most part this is an automatic translation with: sed -e 's:\(\.getVarFlag([^,()]*, [^,()]*\)):\1, True):g' -i `grep -ril getVar *` In this case, the default was False, but True was used since in most cases here expansion would be expected. (From OE-Core rev: 42a10788e89b07b14a150ced07113566cf99fcdd) Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
parent
252e64550a
commit
7fa6eeba1c
|
@ -40,8 +40,8 @@ python package_do_compress_doc() {
|
|||
compress_cmds = {}
|
||||
decompress_cmds = {}
|
||||
for mode in compress_list:
|
||||
compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode)
|
||||
decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode)
|
||||
compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True)
|
||||
decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True)
|
||||
|
||||
mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir", True))
|
||||
if os.path.exists(mandir):
|
||||
|
|
|
@ -3,7 +3,7 @@ inherit terminal
|
|||
DEVSHELL = "${SHELL}"
|
||||
|
||||
python do_devshell () {
|
||||
if d.getVarFlag("do_devshell", "manualfakeroot"):
|
||||
if d.getVarFlag("do_devshell", "manualfakeroot", True):
|
||||
d.prependVar("DEVSHELL", "pseudo ")
|
||||
fakeenv = d.getVar("FAKEROOTENV", True).split()
|
||||
for f in fakeenv:
|
||||
|
@ -27,7 +27,7 @@ do_devshell[nostamp] = "1"
|
|||
# be done as the normal user. We therfore carefully construct the envionment
|
||||
# manually
|
||||
python () {
|
||||
if d.getVarFlag("do_devshell", "fakeroot"):
|
||||
if d.getVarFlag("do_devshell", "fakeroot", True):
|
||||
# We need to signal our code that we want fakeroot however we
|
||||
# can't manipulate the environment and variables here yet (see YOCTO #4795)
|
||||
d.setVarFlag("do_devshell", "manualfakeroot", "1")
|
||||
|
|
|
@ -51,7 +51,7 @@ python () {
|
|||
# Dummy value because the default function can't be called with blank SRC_URI
|
||||
d.setVar('SRCPV', '999')
|
||||
|
||||
tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())
|
||||
tasks = filter(lambda k: d.getVarFlag(k, "task", True), d.keys())
|
||||
|
||||
for task in tasks:
|
||||
if task.endswith("_setscene"):
|
||||
|
|
|
@ -19,7 +19,7 @@ def image_buildinfo_outputvars(vars, listvars, d):
|
|||
ret = ""
|
||||
for var in vars:
|
||||
value = d.getVar(var, True) or ""
|
||||
if (d.getVarFlag(var, 'type') == "list"):
|
||||
if (d.getVarFlag(var, 'type', True) == "list"):
|
||||
value = oe.utils.squashspaces(value)
|
||||
ret += "%s = %s\n" % (var, value)
|
||||
return ret.rstrip('\n')
|
||||
|
|
|
@ -47,7 +47,7 @@ python() {
|
|||
# and cross packages which aren't swabber-native or one of its dependencies
|
||||
# I have ignored them for now...
|
||||
if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('nativesdk', d) and not bb.data.inherits_class('cross', d):
|
||||
deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
|
||||
deps = (d.getVarFlag('do_setscene', 'depends', True) or "").split()
|
||||
deps.append('strace-native:do_populate_sysroot')
|
||||
d.setVarFlag('do_setscene', 'depends', " ".join(deps))
|
||||
logdir = d.expand("${TRACE_LOGDIR}")
|
||||
|
|
|
@ -416,10 +416,10 @@ def find_license_files(d):
|
|||
|
||||
for lic_dir in license_source_dirs:
|
||||
if not os.path.isfile(os.path.join(lic_dir, license_type)):
|
||||
if d.getVarFlag('SPDXLICENSEMAP', license_type) != None:
|
||||
if d.getVarFlag('SPDXLICENSEMAP', license_type, True) != None:
|
||||
# Great, there is an SPDXLICENSEMAP. We can copy!
|
||||
bb.debug(1, "We need to use a SPDXLICENSEMAP for %s" % (license_type))
|
||||
spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type)
|
||||
spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type, True)
|
||||
license_source = lic_dir
|
||||
break
|
||||
elif os.path.isfile(os.path.join(lic_dir, license_type)):
|
||||
|
@ -435,14 +435,14 @@ def find_license_files(d):
|
|||
|
||||
# The user may attempt to use NO_GENERIC_LICENSE for a generic license which doesn't make sense
|
||||
# and should not be allowed, warn the user in this case.
|
||||
if d.getVarFlag('NO_GENERIC_LICENSE', license_type):
|
||||
if d.getVarFlag('NO_GENERIC_LICENSE', license_type, True):
|
||||
bb.warn("%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type))
|
||||
|
||||
elif d.getVarFlag('NO_GENERIC_LICENSE', license_type):
|
||||
elif d.getVarFlag('NO_GENERIC_LICENSE', license_type, True):
|
||||
# if NO_GENERIC_LICENSE is set, we copy the license files from the fetched source
|
||||
# of the package rather than the license_source_dirs.
|
||||
for (basename, path) in lic_files_paths:
|
||||
if d.getVarFlag('NO_GENERIC_LICENSE', license_type) == basename:
|
||||
if d.getVarFlag('NO_GENERIC_LICENSE', license_type, True) == basename:
|
||||
lic_files_paths.append(("generic_" + license_type, path))
|
||||
break
|
||||
else:
|
||||
|
@ -510,7 +510,7 @@ def expand_wildcard_licenses(d, wildcard_licenses):
|
|||
spdxmapkeys = d.getVarFlags('SPDXLICENSEMAP').keys()
|
||||
for wld_lic in wildcard_licenses:
|
||||
spdxflags = fnmatch.filter(spdxmapkeys, wld_lic)
|
||||
licenses += [d.getVarFlag('SPDXLICENSEMAP', flag) for flag in spdxflags]
|
||||
licenses += [d.getVarFlag('SPDXLICENSEMAP', flag, True) for flag in spdxflags]
|
||||
|
||||
spdx_lics = (d.getVar('SRC_DISTRIBUTE_LICENSES', False) or '').split()
|
||||
for wld_lic in wildcard_licenses:
|
||||
|
|
|
@ -429,7 +429,7 @@ def get_package_additional_metadata (pkg_type, d):
|
|||
if d.getVar(key, False) is None:
|
||||
continue
|
||||
d.setVarFlag(key, "type", "list")
|
||||
if d.getVarFlag(key, "separator") is None:
|
||||
if d.getVarFlag(key, "separator", True) is None:
|
||||
d.setVarFlag(key, "separator", "\\n")
|
||||
metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
|
||||
return "\n".join(metadata_fields).strip()
|
||||
|
@ -1916,7 +1916,7 @@ python package_depchains() {
|
|||
|
||||
for suffix in pkgs:
|
||||
for pkg in pkgs[suffix]:
|
||||
if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
|
||||
if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', True):
|
||||
continue
|
||||
(base, func) = pkgs[suffix][pkg]
|
||||
if suffix == "-dev":
|
||||
|
|
|
@ -53,7 +53,7 @@ python do_package_tar () {
|
|||
|
||||
python () {
|
||||
if d.getVar('PACKAGES', True) != '':
|
||||
deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split()
|
||||
deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split()
|
||||
deps.append('tar-native:do_populate_sysroot')
|
||||
deps.append('virtual/fakeroot-native:do_populate_sysroot')
|
||||
d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps))
|
||||
|
|
|
@ -28,7 +28,7 @@ MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg"
|
|||
python () {
|
||||
|
||||
if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
|
||||
flags = d.getVarFlag('do_rootfs', 'recrdeptask')
|
||||
flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
|
||||
flags = flags.replace("do_package_write_ipk", "")
|
||||
flags = flags.replace("do_deploy", "")
|
||||
flags = flags.replace("do_populate_sysroot", "")
|
||||
|
|
|
@ -31,7 +31,7 @@ do_populate_sdk[lockfiles] += "${DEPLOY_DIR_RPM}/rpm.lock"
|
|||
|
||||
python () {
|
||||
if d.getVar('BUILD_IMAGES_FROM_FEEDS', True):
|
||||
flags = d.getVarFlag('do_rootfs', 'recrdeptask')
|
||||
flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
|
||||
flags = flags.replace("do_package_write_rpm", "")
|
||||
flags = flags.replace("do_deploy", "")
|
||||
flags = flags.replace("do_populate_sysroot", "")
|
||||
|
|
|
@ -194,7 +194,7 @@ def exportTests(d,tc):
|
|||
savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP", True)
|
||||
|
||||
keys = [ key for key in d.keys() if not key.startswith("_") and not key.startswith("BB") \
|
||||
and not key.startswith("B_pn") and not key.startswith("do_") and not d.getVarFlag(key, "func")]
|
||||
and not key.startswith("B_pn") and not key.startswith("do_") and not d.getVarFlag(key, "func", True)]
|
||||
for key in keys:
|
||||
try:
|
||||
savedata["d"][key] = d.getVar(key, True)
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
python check_types() {
|
||||
import oe.types
|
||||
for key in e.data.keys():
|
||||
if e.data.getVarFlag(key, "type"):
|
||||
if e.data.getVarFlag(key, "type", True):
|
||||
oe.data.typed_value(key, e.data)
|
||||
}
|
||||
addhandler check_types
|
||||
|
|
|
@ -4,12 +4,12 @@ python do_listtasks() {
|
|||
taskdescs = {}
|
||||
maxlen = 0
|
||||
for e in d.keys():
|
||||
if d.getVarFlag(e, 'task'):
|
||||
if d.getVarFlag(e, 'task', True):
|
||||
maxlen = max(maxlen, len(e))
|
||||
if e.endswith('_setscene'):
|
||||
desc = "%s (setscene version)" % (d.getVarFlag(e[:-9], 'doc') or '')
|
||||
desc = "%s (setscene version)" % (d.getVarFlag(e[:-9], 'doc', True) or '')
|
||||
else:
|
||||
desc = d.getVarFlag(e, 'doc') or ''
|
||||
desc = d.getVarFlag(e, 'doc', True) or ''
|
||||
taskdescs[e] = desc
|
||||
|
||||
tasks = sorted(taskdescs.keys())
|
||||
|
|
|
@ -3,7 +3,7 @@ import oe.maketype
|
|||
def typed_value(key, d):
|
||||
"""Construct a value for the specified metadata variable, using its flags
|
||||
to determine the type and parameters for construction."""
|
||||
var_type = d.getVarFlag(key, 'type')
|
||||
var_type = d.getVarFlag(key, 'type', True)
|
||||
flags = d.getVarFlags(key)
|
||||
if flags is not None:
|
||||
flags = dict((flag, d.expand(value))
|
||||
|
|
|
@ -3,9 +3,9 @@ import itertools
|
|||
def is_optional(feature, d):
|
||||
packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True)
|
||||
if packages:
|
||||
return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional"))
|
||||
return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True))
|
||||
else:
|
||||
return bool(d.getVarFlag("PACKAGE_GROUP_%s" % feature, "optional"))
|
||||
return bool(d.getVarFlag("PACKAGE_GROUP_%s" % feature, "optional", True))
|
||||
|
||||
def packages(features, d):
|
||||
for feature in features:
|
||||
|
|
Loading…
Reference in New Issue