meta: remove True option to getVarFlag calls
getVarFlag() now expands by default, so remove the now-redundant True argument from getVarFlag() calls using a regex search and replace. Search made with the following regex: getVarFlag ?\(( ?[^,()]*, ?[^,()]*), True\) (From OE-Core rev: 2dea9e490a98377010b3d4118d054814c317a735) Signed-off-by: Joshua Lock <joshua.g.lock@intel.com> Signed-off-by: Ross Burton <ross.burton@intel.com> Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
parent
c0f2890c01
commit
3c59b1bf93
|
@ -73,9 +73,9 @@ python () {
|
||||||
bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
|
bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
|
||||||
return
|
return
|
||||||
|
|
||||||
ar_src = d.getVarFlag('ARCHIVER_MODE', 'src', True)
|
ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
|
||||||
ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata', True)
|
ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
|
||||||
ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe', True)
|
ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')
|
||||||
|
|
||||||
if ar_src == "original":
|
if ar_src == "original":
|
||||||
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
|
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
|
||||||
|
@ -104,7 +104,7 @@ python () {
|
||||||
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)
|
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)
|
||||||
|
|
||||||
# Output the srpm package
|
# Output the srpm package
|
||||||
ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True)
|
ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm')
|
||||||
if ar_srpm == "1":
|
if ar_srpm == "1":
|
||||||
if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
|
if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm':
|
||||||
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
|
d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
|
||||||
|
@ -127,7 +127,7 @@ python do_ar_original() {
|
||||||
|
|
||||||
import shutil, tempfile
|
import shutil, tempfile
|
||||||
|
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original":
|
if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
|
||||||
return
|
return
|
||||||
|
|
||||||
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
||||||
|
@ -191,7 +191,7 @@ python do_ar_original() {
|
||||||
|
|
||||||
python do_ar_patched() {
|
python do_ar_patched() {
|
||||||
|
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'src', True) != 'patched':
|
if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
|
||||||
return
|
return
|
||||||
|
|
||||||
# Get the ARCHIVER_OUTDIR before we reset the WORKDIR
|
# Get the ARCHIVER_OUTDIR before we reset the WORKDIR
|
||||||
|
@ -206,7 +206,7 @@ python do_ar_configured() {
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured':
|
if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
|
||||||
bb.note('Archiving the configured source...')
|
bb.note('Archiving the configured source...')
|
||||||
pn = d.getVar('PN')
|
pn = d.getVar('PN')
|
||||||
# "gcc-source-${PV}" recipes don't have "do_configure"
|
# "gcc-source-${PV}" recipes don't have "do_configure"
|
||||||
|
@ -226,12 +226,12 @@ python do_ar_configured() {
|
||||||
bb.build.exec_func('do_kernel_configme', d)
|
bb.build.exec_func('do_kernel_configme', d)
|
||||||
if bb.data.inherits_class('cmake', d):
|
if bb.data.inherits_class('cmake', d):
|
||||||
bb.build.exec_func('do_generate_toolchain_file', d)
|
bb.build.exec_func('do_generate_toolchain_file', d)
|
||||||
prefuncs = d.getVarFlag('do_configure', 'prefuncs', True)
|
prefuncs = d.getVarFlag('do_configure', 'prefuncs')
|
||||||
for func in (prefuncs or '').split():
|
for func in (prefuncs or '').split():
|
||||||
if func != "sysroot_cleansstate":
|
if func != "sysroot_cleansstate":
|
||||||
bb.build.exec_func(func, d)
|
bb.build.exec_func(func, d)
|
||||||
bb.build.exec_func('do_configure', d)
|
bb.build.exec_func('do_configure', d)
|
||||||
postfuncs = d.getVarFlag('do_configure', 'postfuncs', True)
|
postfuncs = d.getVarFlag('do_configure', 'postfuncs')
|
||||||
for func in (postfuncs or '').split():
|
for func in (postfuncs or '').split():
|
||||||
if func != "do_qa_configure":
|
if func != "do_qa_configure":
|
||||||
bb.build.exec_func(func, d)
|
bb.build.exec_func(func, d)
|
||||||
|
@ -279,7 +279,7 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
|
||||||
# exclude.
|
# exclude.
|
||||||
src_patched = src + '.patched'
|
src_patched = src + '.patched'
|
||||||
oe.path.copyhardlinktree(src, src_patched)
|
oe.path.copyhardlinktree(src, src_patched)
|
||||||
for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude', True).split():
|
for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
|
||||||
bb.utils.remove(os.path.join(src_orig, i), recurse=True)
|
bb.utils.remove(os.path.join(src_orig, i), recurse=True)
|
||||||
bb.utils.remove(os.path.join(src_patched, i), recurse=True)
|
bb.utils.remove(os.path.join(src_patched, i), recurse=True)
|
||||||
|
|
||||||
|
@ -293,9 +293,9 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
|
||||||
|
|
||||||
# Run do_unpack and do_patch
|
# Run do_unpack and do_patch
|
||||||
python do_unpack_and_patch() {
|
python do_unpack_and_patch() {
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'src', True) not in \
|
if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
|
||||||
[ 'patched', 'configured'] and \
|
[ 'patched', 'configured'] and \
|
||||||
d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1':
|
d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
|
||||||
return
|
return
|
||||||
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
||||||
ar_workdir = d.getVar('ARCHIVER_WORKDIR')
|
ar_workdir = d.getVar('ARCHIVER_WORKDIR')
|
||||||
|
@ -314,7 +314,7 @@ python do_unpack_and_patch() {
|
||||||
bb.build.exec_func('do_unpack', d)
|
bb.build.exec_func('do_unpack', d)
|
||||||
|
|
||||||
# Save the original source for creating the patches
|
# Save the original source for creating the patches
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
|
if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
|
||||||
src = d.getVar('S').rstrip('/')
|
src = d.getVar('S').rstrip('/')
|
||||||
src_orig = '%s.orig' % src
|
src_orig = '%s.orig' % src
|
||||||
oe.path.copytree(src, src_orig)
|
oe.path.copytree(src, src_orig)
|
||||||
|
@ -324,7 +324,7 @@ python do_unpack_and_patch() {
|
||||||
bb.build.exec_func('do_patch', d)
|
bb.build.exec_func('do_patch', d)
|
||||||
|
|
||||||
# Create the patches
|
# Create the patches
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1':
|
if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
|
||||||
bb.note('Creating diff gz...')
|
bb.note('Creating diff gz...')
|
||||||
create_diff_gz(d, src_orig, src, ar_outdir)
|
create_diff_gz(d, src_orig, src, ar_outdir)
|
||||||
bb.utils.remove(src_orig, recurse=True)
|
bb.utils.remove(src_orig, recurse=True)
|
||||||
|
|
|
@ -33,7 +33,7 @@ python blacklist_multilib_eventhandler() {
|
||||||
continue
|
continue
|
||||||
for p in prefixes:
|
for p in prefixes:
|
||||||
newpkg = p + "-" + pkg
|
newpkg = p + "-" + pkg
|
||||||
if not e.data.getVarFlag('PNBLACKLIST', newpkg, True):
|
if not e.data.getVarFlag('PNBLACKLIST', newpkg):
|
||||||
e.data.setVarFlag('PNBLACKLIST', newpkg, reason)
|
e.data.setVarFlag('PNBLACKLIST', newpkg, reason)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -40,8 +40,8 @@ python package_do_compress_doc() {
|
||||||
compress_cmds = {}
|
compress_cmds = {}
|
||||||
decompress_cmds = {}
|
decompress_cmds = {}
|
||||||
for mode in compress_list:
|
for mode in compress_list:
|
||||||
compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode, True)
|
compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode)
|
||||||
decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode, True)
|
decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode)
|
||||||
|
|
||||||
mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir"))
|
mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir"))
|
||||||
if os.path.exists(mandir):
|
if os.path.exists(mandir):
|
||||||
|
@ -225,9 +225,9 @@ python compress_doc_updatealternatives () {
|
||||||
old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split()
|
old_names = (d.getVar('ALTERNATIVE_%s' % pkg) or "").split()
|
||||||
new_names = []
|
new_names = []
|
||||||
for old_name in old_names:
|
for old_name in old_names:
|
||||||
old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True)
|
old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name)
|
||||||
old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \
|
old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name) or \
|
||||||
d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \
|
d.getVarFlag('ALTERNATIVE_TARGET', old_name) or \
|
||||||
d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \
|
d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \
|
||||||
d.getVar('ALTERNATIVE_TARGET') or \
|
d.getVar('ALTERNATIVE_TARGET') or \
|
||||||
old_link
|
old_link
|
||||||
|
@ -241,10 +241,10 @@ python compress_doc_updatealternatives () {
|
||||||
new_target = old_target + '.' + compress_mode
|
new_target = old_target + '.' + compress_mode
|
||||||
d.delVarFlag('ALTERNATIVE_LINK_NAME', old_name)
|
d.delVarFlag('ALTERNATIVE_LINK_NAME', old_name)
|
||||||
d.setVarFlag('ALTERNATIVE_LINK_NAME', new_name, new_link)
|
d.setVarFlag('ALTERNATIVE_LINK_NAME', new_name, new_link)
|
||||||
if d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True):
|
if d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name):
|
||||||
d.delVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name)
|
d.delVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name)
|
||||||
d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, new_name, new_target)
|
d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, new_name, new_target)
|
||||||
elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True):
|
elif d.getVarFlag('ALTERNATIVE_TARGET', old_name):
|
||||||
d.delVarFlag('ALTERNATIVE_TARGET', old_name)
|
d.delVarFlag('ALTERNATIVE_TARGET', old_name)
|
||||||
d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target)
|
d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target)
|
||||||
elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg):
|
elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg):
|
||||||
|
|
|
@ -3,7 +3,7 @@ inherit terminal
|
||||||
DEVSHELL = "${SHELL}"
|
DEVSHELL = "${SHELL}"
|
||||||
|
|
||||||
python do_devshell () {
|
python do_devshell () {
|
||||||
if d.getVarFlag("do_devshell", "manualfakeroot", True):
|
if d.getVarFlag("do_devshell", "manualfakeroot"):
|
||||||
d.prependVar("DEVSHELL", "pseudo ")
|
d.prependVar("DEVSHELL", "pseudo ")
|
||||||
fakeenv = d.getVar("FAKEROOTENV").split()
|
fakeenv = d.getVar("FAKEROOTENV").split()
|
||||||
for f in fakeenv:
|
for f in fakeenv:
|
||||||
|
@ -27,7 +27,7 @@ do_devshell[nostamp] = "1"
|
||||||
# be done as the normal user. We therfore carefully construct the envionment
|
# be done as the normal user. We therfore carefully construct the envionment
|
||||||
# manually
|
# manually
|
||||||
python () {
|
python () {
|
||||||
if d.getVarFlag("do_devshell", "fakeroot", True):
|
if d.getVarFlag("do_devshell", "fakeroot"):
|
||||||
# We need to signal our code that we want fakeroot however we
|
# We need to signal our code that we want fakeroot however we
|
||||||
# can't manipulate the environment and variables here yet (see YOCTO #4795)
|
# can't manipulate the environment and variables here yet (see YOCTO #4795)
|
||||||
d.setVarFlag("do_devshell", "manualfakeroot", "1")
|
d.setVarFlag("do_devshell", "manualfakeroot", "1")
|
||||||
|
|
|
@ -69,7 +69,7 @@ python () {
|
||||||
# Dummy value because the default function can't be called with blank SRC_URI
|
# Dummy value because the default function can't be called with blank SRC_URI
|
||||||
d.setVar('SRCPV', '999')
|
d.setVar('SRCPV', '999')
|
||||||
|
|
||||||
tasks = filter(lambda k: d.getVarFlag(k, "task", True), d.keys())
|
tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())
|
||||||
|
|
||||||
for task in tasks:
|
for task in tasks:
|
||||||
if task.endswith("_setscene"):
|
if task.endswith("_setscene"):
|
||||||
|
|
|
@ -19,7 +19,7 @@ def image_buildinfo_outputvars(vars, listvars, d):
|
||||||
ret = ""
|
ret = ""
|
||||||
for var in vars:
|
for var in vars:
|
||||||
value = d.getVar(var) or ""
|
value = d.getVar(var) or ""
|
||||||
if (d.getVarFlag(var, 'type', True) == "list"):
|
if (d.getVarFlag(var, 'type') == "list"):
|
||||||
value = oe.utils.squashspaces(value)
|
value = oe.utils.squashspaces(value)
|
||||||
ret += "%s = %s\n" % (var, value)
|
ret += "%s = %s\n" % (var, value)
|
||||||
return ret.rstrip('\n')
|
return ret.rstrip('\n')
|
||||||
|
|
|
@ -51,7 +51,7 @@ FEATURE_PACKAGES_splash = "${SPLASH}"
|
||||||
IMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("IMAGE_FEATURES", d)}'
|
IMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("IMAGE_FEATURES", d)}'
|
||||||
|
|
||||||
def check_image_features(d):
|
def check_image_features(d):
|
||||||
valid_features = (d.getVarFlag('IMAGE_FEATURES', 'validitems', True) or "").split()
|
valid_features = (d.getVarFlag('IMAGE_FEATURES', 'validitems') or "").split()
|
||||||
valid_features += d.getVarFlags('COMPLEMENTARY_GLOB').keys()
|
valid_features += d.getVarFlags('COMPLEMENTARY_GLOB').keys()
|
||||||
for var in d:
|
for var in d:
|
||||||
if var.startswith("PACKAGE_GROUP_"):
|
if var.startswith("PACKAGE_GROUP_"):
|
||||||
|
@ -595,7 +595,7 @@ python create_symlinks() {
|
||||||
manifest_name = d.getVar('IMAGE_MANIFEST')
|
manifest_name = d.getVar('IMAGE_MANIFEST')
|
||||||
taskname = d.getVar("BB_CURRENTTASK")
|
taskname = d.getVar("BB_CURRENTTASK")
|
||||||
subimages = (d.getVarFlag("do_" + taskname, 'subimages', False) or "").split()
|
subimages = (d.getVarFlag("do_" + taskname, 'subimages', False) or "").split()
|
||||||
imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix', True) or d.expand("${IMAGE_NAME_SUFFIX}.")
|
imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix') or d.expand("${IMAGE_NAME_SUFFIX}.")
|
||||||
|
|
||||||
if not link_name:
|
if not link_name:
|
||||||
return
|
return
|
||||||
|
|
|
@ -249,7 +249,7 @@ def get_boot_dependencies(d):
|
||||||
|
|
||||||
for task in boot_tasks:
|
for task in boot_tasks:
|
||||||
boot_depends_string = "%s %s" % (boot_depends_string,
|
boot_depends_string = "%s %s" % (boot_depends_string,
|
||||||
d.getVarFlag(task, "depends", True) or "")
|
d.getVarFlag(task, "depends") or "")
|
||||||
boot_depends = [dep.split(":")[0] for dep
|
boot_depends = [dep.split(":")[0] for dep
|
||||||
in boot_depends_string.split()
|
in boot_depends_string.split()
|
||||||
if not dep.split(":")[0].endswith("-native")]
|
if not dep.split(":")[0].endswith("-native")]
|
||||||
|
@ -431,10 +431,10 @@ def find_license_files(d):
|
||||||
# unless NO_GENERIC_LICENSE is set.
|
# unless NO_GENERIC_LICENSE is set.
|
||||||
for lic_dir in license_source_dirs:
|
for lic_dir in license_source_dirs:
|
||||||
if not os.path.isfile(os.path.join(lic_dir, license_type)):
|
if not os.path.isfile(os.path.join(lic_dir, license_type)):
|
||||||
if d.getVarFlag('SPDXLICENSEMAP', license_type, True) != None:
|
if d.getVarFlag('SPDXLICENSEMAP', license_type) != None:
|
||||||
# Great, there is an SPDXLICENSEMAP. We can copy!
|
# Great, there is an SPDXLICENSEMAP. We can copy!
|
||||||
bb.debug(1, "We need to use a SPDXLICENSEMAP for %s" % (license_type))
|
bb.debug(1, "We need to use a SPDXLICENSEMAP for %s" % (license_type))
|
||||||
spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type, True)
|
spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type)
|
||||||
license_source = lic_dir
|
license_source = lic_dir
|
||||||
break
|
break
|
||||||
elif os.path.isfile(os.path.join(lic_dir, license_type)):
|
elif os.path.isfile(os.path.join(lic_dir, license_type)):
|
||||||
|
@ -442,7 +442,7 @@ def find_license_files(d):
|
||||||
license_source = lic_dir
|
license_source = lic_dir
|
||||||
break
|
break
|
||||||
|
|
||||||
non_generic_lic = d.getVarFlag('NO_GENERIC_LICENSE', license_type, True)
|
non_generic_lic = d.getVarFlag('NO_GENERIC_LICENSE', license_type)
|
||||||
if spdx_generic and license_source:
|
if spdx_generic and license_source:
|
||||||
# we really should copy to generic_ + spdx_generic, however, that ends up messing the manifest
|
# we really should copy to generic_ + spdx_generic, however, that ends up messing the manifest
|
||||||
# audit up. This should be fixed in emit_pkgdata (or, we actually got and fix all the recipes)
|
# audit up. This should be fixed in emit_pkgdata (or, we actually got and fix all the recipes)
|
||||||
|
@ -451,7 +451,7 @@ def find_license_files(d):
|
||||||
|
|
||||||
# The user may attempt to use NO_GENERIC_LICENSE for a generic license which doesn't make sense
|
# The user may attempt to use NO_GENERIC_LICENSE for a generic license which doesn't make sense
|
||||||
# and should not be allowed, warn the user in this case.
|
# and should not be allowed, warn the user in this case.
|
||||||
if d.getVarFlag('NO_GENERIC_LICENSE', license_type, True):
|
if d.getVarFlag('NO_GENERIC_LICENSE', license_type):
|
||||||
bb.warn("%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type))
|
bb.warn("%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type))
|
||||||
|
|
||||||
elif non_generic_lic and non_generic_lic in lic_chksums:
|
elif non_generic_lic and non_generic_lic in lic_chksums:
|
||||||
|
@ -505,7 +505,7 @@ def return_spdx(d, license):
|
||||||
"""
|
"""
|
||||||
This function returns the spdx mapping of a license if it exists.
|
This function returns the spdx mapping of a license if it exists.
|
||||||
"""
|
"""
|
||||||
return d.getVarFlag('SPDXLICENSEMAP', license, True)
|
return d.getVarFlag('SPDXLICENSEMAP', license)
|
||||||
|
|
||||||
def canonical_license(d, license):
|
def canonical_license(d, license):
|
||||||
"""
|
"""
|
||||||
|
@ -514,7 +514,7 @@ def canonical_license(d, license):
|
||||||
'X' if availabel and the tailing '+' (so GPLv3+ becomes GPL-3.0+),
|
'X' if availabel and the tailing '+' (so GPLv3+ becomes GPL-3.0+),
|
||||||
or the passed license if there is no canonical form.
|
or the passed license if there is no canonical form.
|
||||||
"""
|
"""
|
||||||
lic = d.getVarFlag('SPDXLICENSEMAP', license, True) or ""
|
lic = d.getVarFlag('SPDXLICENSEMAP', license) or ""
|
||||||
if not lic and license.endswith('+'):
|
if not lic and license.endswith('+'):
|
||||||
lic = d.getVarFlag('SPDXLICENSEMAP', license.rstrip('+'), True)
|
lic = d.getVarFlag('SPDXLICENSEMAP', license.rstrip('+'))
|
||||||
if lic:
|
if lic:
|
||||||
|
@ -531,7 +531,7 @@ def expand_wildcard_licenses(d, wildcard_licenses):
|
||||||
spdxmapkeys = d.getVarFlags('SPDXLICENSEMAP').keys()
|
spdxmapkeys = d.getVarFlags('SPDXLICENSEMAP').keys()
|
||||||
for wld_lic in wildcard_licenses:
|
for wld_lic in wildcard_licenses:
|
||||||
spdxflags = fnmatch.filter(spdxmapkeys, wld_lic)
|
spdxflags = fnmatch.filter(spdxmapkeys, wld_lic)
|
||||||
licenses += [d.getVarFlag('SPDXLICENSEMAP', flag, True) for flag in spdxflags]
|
licenses += [d.getVarFlag('SPDXLICENSEMAP', flag) for flag in spdxflags]
|
||||||
|
|
||||||
spdx_lics = (d.getVar('SRC_DISTRIBUTE_LICENSES', False) or '').split()
|
spdx_lics = (d.getVar('SRC_DISTRIBUTE_LICENSES', False) or '').split()
|
||||||
for wld_lic in wildcard_licenses:
|
for wld_lic in wildcard_licenses:
|
||||||
|
|
|
@ -462,7 +462,7 @@ def get_package_additional_metadata (pkg_type, d):
|
||||||
if d.getVar(key, False) is None:
|
if d.getVar(key, False) is None:
|
||||||
continue
|
continue
|
||||||
d.setVarFlag(key, "type", "list")
|
d.setVarFlag(key, "type", "list")
|
||||||
if d.getVarFlag(key, "separator", True) is None:
|
if d.getVarFlag(key, "separator") is None:
|
||||||
d.setVarFlag(key, "separator", "\\n")
|
d.setVarFlag(key, "separator", "\\n")
|
||||||
metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
|
metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)]
|
||||||
return "\n".join(metadata_fields).strip()
|
return "\n".join(metadata_fields).strip()
|
||||||
|
@ -1963,7 +1963,7 @@ python package_depchains() {
|
||||||
|
|
||||||
for suffix in pkgs:
|
for suffix in pkgs:
|
||||||
for pkg in pkgs[suffix]:
|
for pkg in pkgs[suffix]:
|
||||||
if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', True):
|
if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
|
||||||
continue
|
continue
|
||||||
(base, func) = pkgs[suffix][pkg]
|
(base, func) = pkgs[suffix][pkg]
|
||||||
if suffix == "-dev":
|
if suffix == "-dev":
|
||||||
|
|
|
@ -86,14 +86,14 @@ python write_specfile () {
|
||||||
|
|
||||||
# append information for logs and patches to %prep
|
# append information for logs and patches to %prep
|
||||||
def add_prep(d,spec_files_bottom):
|
def add_prep(d,spec_files_bottom):
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
|
if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d):
|
||||||
spec_files_bottom.append('%%prep -n %s' % d.getVar('PN') )
|
spec_files_bottom.append('%%prep -n %s' % d.getVar('PN') )
|
||||||
spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
|
spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"")
|
||||||
spec_files_bottom.append('')
|
spec_files_bottom.append('')
|
||||||
|
|
||||||
# append the name of tarball to key word 'SOURCE' in xxx.spec.
|
# append the name of tarball to key word 'SOURCE' in xxx.spec.
|
||||||
def tail_source(d):
|
def tail_source(d):
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
|
if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d):
|
||||||
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
ar_outdir = d.getVar('ARCHIVER_OUTDIR')
|
||||||
if not os.path.exists(ar_outdir):
|
if not os.path.exists(ar_outdir):
|
||||||
return
|
return
|
||||||
|
@ -720,7 +720,7 @@ python do_package_rpm () {
|
||||||
cmd = cmd + " --define 'debug_package %{nil}'"
|
cmd = cmd + " --define 'debug_package %{nil}'"
|
||||||
cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'"
|
cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'"
|
||||||
cmd = cmd + " --define '_tmppath " + workdir + "'"
|
cmd = cmd + " --define '_tmppath " + workdir + "'"
|
||||||
if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d):
|
if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d):
|
||||||
cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'"
|
cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'"
|
||||||
cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR') + "'"
|
cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR') + "'"
|
||||||
cmdsrpm = cmdsrpm + " -bs " + outspecfile
|
cmdsrpm = cmdsrpm + " -bs " + outspecfile
|
||||||
|
|
|
@ -58,7 +58,7 @@ python do_package_tar () {
|
||||||
|
|
||||||
python () {
|
python () {
|
||||||
if d.getVar('PACKAGES') != '':
|
if d.getVar('PACKAGES') != '':
|
||||||
deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split()
|
deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split()
|
||||||
deps.append('tar-native:do_populate_sysroot')
|
deps.append('tar-native:do_populate_sysroot')
|
||||||
deps.append('virtual/fakeroot-native:do_populate_sysroot')
|
deps.append('virtual/fakeroot-native:do_populate_sysroot')
|
||||||
d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps))
|
d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps))
|
||||||
|
|
|
@ -51,7 +51,7 @@ python() {
|
||||||
|
|
||||||
d.appendVarFlag('do_build', 'recrdeptask', ' ' + pkgcomparefunc)
|
d.appendVarFlag('do_build', 'recrdeptask', ' ' + pkgcomparefunc)
|
||||||
|
|
||||||
if d.getVarFlag(pkgwritefunc, 'noexec', True) or not d.getVarFlag(pkgwritefunc, 'task', True):
|
if d.getVarFlag(pkgwritefunc, 'noexec') or not d.getVarFlag(pkgwritefunc, 'task'):
|
||||||
# Packaging is disabled for this recipe, we shouldn't do anything
|
# Packaging is disabled for this recipe, we shouldn't do anything
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
|
@ -12,7 +12,7 @@ inherit terminal
|
||||||
|
|
||||||
python () {
|
python () {
|
||||||
if d.getVar('PATCHTOOL') == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS') == '1':
|
if d.getVar('PATCHTOOL') == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS') == '1':
|
||||||
tasks = list(filter(lambda k: d.getVarFlag(k, "task", True), d.keys()))
|
tasks = list(filter(lambda k: d.getVarFlag(k, "task"), d.keys()))
|
||||||
extratasks = []
|
extratasks = []
|
||||||
def follow_chain(task, endtask, chain=None):
|
def follow_chain(task, endtask, chain=None):
|
||||||
if not chain:
|
if not chain:
|
||||||
|
|
|
@ -629,7 +629,7 @@ def get_ext_sdk_depends(d):
|
||||||
pn = d.getVar('PN')
|
pn = d.getVar('PN')
|
||||||
deplist = ['%s:%s' % (pn, dep) for dep in deps]
|
deplist = ['%s:%s' % (pn, dep) for dep in deps]
|
||||||
for task in ['do_image_complete', 'do_rootfs', 'do_build']:
|
for task in ['do_image_complete', 'do_rootfs', 'do_build']:
|
||||||
deplist.extend((d.getVarFlag(task, 'depends', True) or '').split())
|
deplist.extend((d.getVarFlag(task, 'depends') or '').split())
|
||||||
return ' '.join(deplist)
|
return ' '.join(deplist)
|
||||||
|
|
||||||
python do_sdk_depends() {
|
python do_sdk_depends() {
|
||||||
|
@ -652,7 +652,7 @@ def get_sdk_ext_rdepends(d):
|
||||||
localdata = d.createCopy()
|
localdata = d.createCopy()
|
||||||
localdata.appendVar('OVERRIDES', ':task-populate-sdk-ext')
|
localdata.appendVar('OVERRIDES', ':task-populate-sdk-ext')
|
||||||
bb.data.update_data(localdata)
|
bb.data.update_data(localdata)
|
||||||
return localdata.getVarFlag('do_populate_sdk', 'rdepends', True)
|
return localdata.getVarFlag('do_populate_sdk', 'rdepends')
|
||||||
|
|
||||||
do_populate_sdk_ext[dirs] = "${@d.getVarFlag('do_populate_sdk', 'dirs', False)}"
|
do_populate_sdk_ext[dirs] = "${@d.getVarFlag('do_populate_sdk', 'dirs', False)}"
|
||||||
|
|
||||||
|
|
|
@ -28,7 +28,7 @@ MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg"
|
||||||
python () {
|
python () {
|
||||||
|
|
||||||
if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
|
if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
|
||||||
flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
|
flags = d.getVarFlag('do_rootfs', 'recrdeptask')
|
||||||
flags = flags.replace("do_package_write_ipk", "")
|
flags = flags.replace("do_package_write_ipk", "")
|
||||||
flags = flags.replace("do_deploy", "")
|
flags = flags.replace("do_deploy", "")
|
||||||
flags = flags.replace("do_populate_sysroot", "")
|
flags = flags.replace("do_populate_sysroot", "")
|
||||||
|
|
|
@ -26,7 +26,7 @@ do_rootfs[vardeps] += "PACKAGE_FEED_URIS"
|
||||||
|
|
||||||
python () {
|
python () {
|
||||||
if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
|
if d.getVar('BUILD_IMAGES_FROM_FEEDS'):
|
||||||
flags = d.getVarFlag('do_rootfs', 'recrdeptask', True)
|
flags = d.getVarFlag('do_rootfs', 'recrdeptask')
|
||||||
flags = flags.replace("do_package_write_rpm", "")
|
flags = flags.replace("do_package_write_rpm", "")
|
||||||
flags = flags.replace("do_deploy", "")
|
flags = flags.replace("do_deploy", "")
|
||||||
flags = flags.replace("do_populate_sysroot", "")
|
flags = flags.replace("do_populate_sysroot", "")
|
||||||
|
|
|
@ -125,12 +125,12 @@ def sstate_state_fromvars(d, task = None):
|
||||||
|
|
||||||
if task.startswith("do_"):
|
if task.startswith("do_"):
|
||||||
task = task[3:]
|
task = task[3:]
|
||||||
inputs = (d.getVarFlag("do_" + task, 'sstate-inputdirs', True) or "").split()
|
inputs = (d.getVarFlag("do_" + task, 'sstate-inputdirs') or "").split()
|
||||||
outputs = (d.getVarFlag("do_" + task, 'sstate-outputdirs', True) or "").split()
|
outputs = (d.getVarFlag("do_" + task, 'sstate-outputdirs') or "").split()
|
||||||
plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs', True) or "").split()
|
plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs') or "").split()
|
||||||
lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile', True) or "").split()
|
lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split()
|
||||||
lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared', True) or "").split()
|
lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split()
|
||||||
interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs', True) or "").split()
|
interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "").split()
|
||||||
if not task or len(inputs) != len(outputs):
|
if not task or len(inputs) != len(outputs):
|
||||||
bb.fatal("sstate variables not setup correctly?!")
|
bb.fatal("sstate variables not setup correctly?!")
|
||||||
|
|
||||||
|
@ -425,7 +425,7 @@ def sstate_clean(ss, d):
|
||||||
|
|
||||||
d2 = d.createCopy()
|
d2 = d.createCopy()
|
||||||
stamp_clean = d.getVar("STAMPCLEAN")
|
stamp_clean = d.getVar("STAMPCLEAN")
|
||||||
extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True)
|
extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info')
|
||||||
if extrainf:
|
if extrainf:
|
||||||
d2.setVar("SSTATE_MANMACH", extrainf)
|
d2.setVar("SSTATE_MANMACH", extrainf)
|
||||||
wildcard_stfile = "%s.do_%s*.%s" % (stamp_clean, ss['task'], extrainf)
|
wildcard_stfile = "%s.do_%s*.%s" % (stamp_clean, ss['task'], extrainf)
|
||||||
|
|
|
@ -58,7 +58,7 @@ def exportTests(d,tc):
|
||||||
savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP")
|
savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP")
|
||||||
|
|
||||||
keys = [ key for key in d.keys() if not key.startswith("_") and not key.startswith("BB") \
|
keys = [ key for key in d.keys() if not key.startswith("_") and not key.startswith("BB") \
|
||||||
and not key.startswith("B_pn") and not key.startswith("do_") and not d.getVarFlag(key, "func", True)]
|
and not key.startswith("B_pn") and not key.startswith("do_") and not d.getVarFlag(key, "func")]
|
||||||
for key in keys:
|
for key in keys:
|
||||||
try:
|
try:
|
||||||
savedata["d"][key] = d.getVar(key)
|
savedata["d"][key] = d.getVar(key)
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
python check_types() {
|
python check_types() {
|
||||||
import oe.types
|
import oe.types
|
||||||
for key in e.data.keys():
|
for key in e.data.keys():
|
||||||
if e.data.getVarFlag(key, "type", True):
|
if e.data.getVarFlag(key, "type"):
|
||||||
oe.data.typed_value(key, e.data)
|
oe.data.typed_value(key, e.data)
|
||||||
}
|
}
|
||||||
addhandler check_types
|
addhandler check_types
|
||||||
|
|
|
@ -133,12 +133,12 @@ python perform_packagecopy_append () {
|
||||||
# If the src == dest, we know we need to rename the dest by appending ${BPN}
|
# If the src == dest, we know we need to rename the dest by appending ${BPN}
|
||||||
link_rename = {}
|
link_rename = {}
|
||||||
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
|
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
|
||||||
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
|
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
|
||||||
if not alt_link:
|
if not alt_link:
|
||||||
alt_link = "%s/%s" % (d.getVar('bindir'), alt_name)
|
alt_link = "%s/%s" % (d.getVar('bindir'), alt_name)
|
||||||
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
|
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
|
||||||
|
|
||||||
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
|
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name)
|
||||||
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
|
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
|
||||||
# Sometimes alt_target is specified as relative to the link name.
|
# Sometimes alt_target is specified as relative to the link name.
|
||||||
alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
|
alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
|
||||||
|
@ -198,13 +198,13 @@ python populate_packages_updatealternatives () {
|
||||||
alt_setup_links = ""
|
alt_setup_links = ""
|
||||||
alt_remove_links = ""
|
alt_remove_links = ""
|
||||||
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
|
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
|
||||||
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
|
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
|
||||||
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
|
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name)
|
||||||
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
|
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
|
||||||
# Sometimes alt_target is specified as relative to the link name.
|
# Sometimes alt_target is specified as relative to the link name.
|
||||||
alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
|
alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
|
||||||
|
|
||||||
alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True)
|
alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name)
|
||||||
alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg) or d.getVar('ALTERNATIVE_PRIORITY')
|
alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg) or d.getVar('ALTERNATIVE_PRIORITY')
|
||||||
|
|
||||||
# This shouldn't trigger, as it should have been resolved earlier!
|
# This shouldn't trigger, as it should have been resolved earlier!
|
||||||
|
@ -247,8 +247,8 @@ python package_do_filedeps_append () {
|
||||||
|
|
||||||
for pkg in packages.split():
|
for pkg in packages.split():
|
||||||
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
|
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
|
||||||
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
|
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
|
||||||
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
|
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name)
|
||||||
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
|
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
|
||||||
|
|
||||||
if alt_link == alt_target:
|
if alt_link == alt_target:
|
||||||
|
|
|
@ -4,12 +4,12 @@ python do_listtasks() {
|
||||||
taskdescs = {}
|
taskdescs = {}
|
||||||
maxlen = 0
|
maxlen = 0
|
||||||
for e in d.keys():
|
for e in d.keys():
|
||||||
if d.getVarFlag(e, 'task', True):
|
if d.getVarFlag(e, 'task'):
|
||||||
maxlen = max(maxlen, len(e))
|
maxlen = max(maxlen, len(e))
|
||||||
if e.endswith('_setscene'):
|
if e.endswith('_setscene'):
|
||||||
desc = "%s (setscene version)" % (d.getVarFlag(e[:-9], 'doc', True) or '')
|
desc = "%s (setscene version)" % (d.getVarFlag(e[:-9], 'doc') or '')
|
||||||
else:
|
else:
|
||||||
desc = d.getVarFlag(e, 'doc', True) or ''
|
desc = d.getVarFlag(e, 'doc') or ''
|
||||||
taskdescs[e] = desc
|
taskdescs[e] = desc
|
||||||
|
|
||||||
tasks = sorted(taskdescs.keys())
|
tasks = sorted(taskdescs.keys())
|
||||||
|
|
|
@ -3,7 +3,7 @@ import oe.maketype
|
||||||
def typed_value(key, d):
|
def typed_value(key, d):
|
||||||
"""Construct a value for the specified metadata variable, using its flags
|
"""Construct a value for the specified metadata variable, using its flags
|
||||||
to determine the type and parameters for construction."""
|
to determine the type and parameters for construction."""
|
||||||
var_type = d.getVarFlag(key, 'type', True)
|
var_type = d.getVarFlag(key, 'type')
|
||||||
flags = d.getVarFlags(key)
|
flags = d.getVarFlags(key)
|
||||||
if flags is not None:
|
if flags is not None:
|
||||||
flags = dict((flag, d.expand(value))
|
flags = dict((flag, d.expand(value))
|
||||||
|
|
|
@ -3,9 +3,9 @@ import itertools
|
||||||
def is_optional(feature, d):
|
def is_optional(feature, d):
|
||||||
packages = d.getVar("FEATURE_PACKAGES_%s" % feature)
|
packages = d.getVar("FEATURE_PACKAGES_%s" % feature)
|
||||||
if packages:
|
if packages:
|
||||||
return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True))
|
return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional"))
|
||||||
else:
|
else:
|
||||||
return bool(d.getVarFlag("PACKAGE_GROUP_%s" % feature, "optional", True))
|
return bool(d.getVarFlag("PACKAGE_GROUP_%s" % feature, "optional"))
|
||||||
|
|
||||||
def packages(features, d):
|
def packages(features, d):
|
||||||
for feature in features:
|
for feature in features:
|
||||||
|
|
|
@ -348,7 +348,7 @@ def sstate_get_manifest_filename(task, d):
|
||||||
Also returns the datastore that can be used to query related variables.
|
Also returns the datastore that can be used to query related variables.
|
||||||
"""
|
"""
|
||||||
d2 = d.createCopy()
|
d2 = d.createCopy()
|
||||||
extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info', True)
|
extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info')
|
||||||
if extrainf:
|
if extrainf:
|
||||||
d2.setVar("SSTATE_MANMACH", extrainf)
|
d2.setVar("SSTATE_MANMACH", extrainf)
|
||||||
return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
|
return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2)
|
||||||
|
|
Loading…
Reference in New Issue