meta: Replace bb.data.expand(xxx, d) -> d.expand(xxx)
sed \
 -e 's:bb.data.\(expand([^,()]*\), *\([^) ]*\) *):\2.\1):g' \
 -i `grep -ril bb.data.expand *`

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
commit b1aeaa8b0d
parent 22f0df2aa7
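For readers skimming the diff below, here is a minimal, self-contained sketch of the two call styles this commit converts between. It is illustrative only and not BitBake code: the ToyDataSmart class, its contents and the data_expand helper are hypothetical stand-ins for BitBake's datastore ("d") and the old bb.data.expand module function; only the two call forms themselves come from the patch.

import re

class ToyDataSmart:
    """Toy stand-in for BitBake's datastore; holds variables and expands ${VAR} refs."""
    def __init__(self, values):
        self._values = values

    def expand(self, s):
        # Substitute ${VAR} references from the stored values (single level only,
        # unlike the real datastore, which expands recursively).
        return re.sub(r"\$\{(\w+)\}", lambda m: self._values.get(m.group(1), ""), s)

def data_expand(s, d):
    # Old-style module-level helper, analogous to bb.data.expand(s, d).
    return d.expand(s)

d = ToyDataSmart({"PN": "ncurses", "PV": "5.9"})
print(data_expand("${PN}-${PV}", d))   # old style: bb.data.expand("${PN}-${PV}", d)
print(d.expand("${PN}-${PV}"))         # new style: d.expand("${PN}-${PV}")

Both calls return the same expanded string, which is why the sed expression in the commit message can rewrite bb.data.expand(xxx, d) into d.expand(xxx) mechanically across the tree.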
@@ -122,7 +122,7 @@ def generate_git_config(e):
     gitconfig_path = e.data.getVar('GIT_CONFIG', True)
     proxy_command = " gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True)
 
-    bb.mkdirhier(bb.data.expand("${GIT_CONFIG_PATH}", e.data))
+    bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}"))
     if (os.path.exists(gitconfig_path)):
         os.remove(gitconfig_path)
@@ -307,7 +307,7 @@ python () {
    def appendVar(varname, appends):
        if not appends:
            return
-       varname = bb.data.expand(varname, d)
+       varname = d.expand(varname)
        d.appendVar(varname, " " + " ".join(appends))
 
    extradeps = []
@@ -132,7 +132,7 @@ def get_timedata(var, data):
 def write_task_data(status, logfile, dev, e):
     bn = get_bn(e)
     bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
-    taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
+    taskdir = os.path.join(bsdir, e.data.expand("${PF}"))
     file = open(os.path.join(logfile), "a")
     timedata = get_timedata("__timedata_task", e.data)
     if timedata:
@@ -205,7 +205,7 @@ python run_buildstats () {
        bn = get_bn(e)
        device = get_device(e)
        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
-       taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
+       taskdir = os.path.join(bsdir, e.data.expand("${PF}"))
        build_time = os.path.join(bsdir, "build_stats")
        file = open(build_time, "a")
        ########################################################################
@@ -230,7 +230,7 @@ python run_buildstats () {
        bn = get_bn(e)
        device = get_device(e)
        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
-       taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
+       taskdir = os.path.join(bsdir, e.data.expand("${PF}"))
        if device != "NoLogicalDevice":
            set_diskdata("__diskdata_task", device, e.data)
        set_timedata("__timedata_task", e.data)
@@ -248,7 +248,7 @@ python run_buildstats () {
        bn = get_bn(e)
        device = get_device(e)
        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
-       taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
+       taskdir = os.path.join(bsdir, e.data.expand("${PF}"))
        write_task_data("passed", os.path.join(taskdir, e.task), device, e)
        if e.task == "do_rootfs":
            bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
@@ -263,7 +263,7 @@ python run_buildstats () {
        bn = get_bn(e)
        device = get_device(e)
        bsdir = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), bn)
-       taskdir = os.path.join(bsdir, bb.data.expand("${PF}", e.data))
+       taskdir = os.path.join(bsdir, e.data.expand("${PF}"))
        write_task_data("failed", os.path.join(taskdir, e.task), device, e)
        ########################################################################
        # Lets make things easier and tell people where the build failed in
@@ -272,7 +272,7 @@ python run_buildstats () {
        ########################################################################
        build_status = os.path.join(bsdir, "build_stats")
        file = open(build_status,"a")
-       file.write(bb.data.expand("Failed at: ${PF} at task: %s \n" % e.task, e.data))
+       file.write(e.data.expand("Failed at: ${PF} at task: %s \n" % e.task))
        file.close()
 
 }
@@ -12,7 +12,7 @@ PERL_OWN_DIR = "${@["", "/perl-native"][(bb.data.inherits_class('native', d))]}"
 # Determine the staged version of perl from the perl configuration file
 def get_perl_version(d):
     import re
-    cfg = bb.data.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh', d)
+    cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh')
     try:
         f = open(cfg, 'r')
     except IOError:
@@ -39,7 +39,7 @@ def icecc_dep_prepend(d):
 DEPENDS_prepend += "${@icecc_dep_prepend(d)} "
 
 def get_cross_kernel_cc(bb,d):
-    kernel_cc = bb.data.expand('${KERNEL_CC}', d)
+    kernel_cc = d.expand('${KERNEL_CC}')
     kernel_cc = kernel_cc.replace('ccache', '').strip()
     kernel_cc = kernel_cc.split(' ')[0]
     kernel_cc = kernel_cc.strip()
@@ -49,7 +49,7 @@ def create_path(compilers, bb, d):
     """
     Create Symlinks for the icecc in the staging directory
     """
-    staging = os.path.join(bb.data.expand('${STAGING_BINDIR}', d), "ice")
+    staging = os.path.join(d.expand('${STAGING_BINDIR}'), "ice")
     if icc_is_kernel(bb, d):
         staging += "-kernel"
 
@@ -78,7 +78,7 @@ def create_path(compilers, bb, d):
     return staging
 
 def use_icc(bb,d):
-    package_tmp = bb.data.expand('${PN}', d)
+    package_tmp = d.expand('${PN}')
 
     system_class_blacklist = [ "none" ]
     user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL') or "none").split()
@@ -101,7 +101,7 @@ def use_icc(bb,d):
        return "no"
 
    if d.getVar('PARALLEL_MAKE') == "":
-       bb.note(package_tmp, " ", bb.data.expand('${PV}', d), " has empty PARALLEL_MAKE, disable icecc")
+       bb.note(package_tmp, " ", d.expand('${PV}'), " has empty PARALLEL_MAKE, disable icecc")
        return "no"
 
    return "yes"
@@ -124,19 +124,19 @@ def icc_version(bb, d):
 
    if icc_is_native(bb, d):
        archive_name = "local-host-env"
-   elif bb.data.expand('${HOST_PREFIX}', d) == "":
-       bb.fatal(bb.data.expand("${PN}", d), " NULL prefix")
+   elif d.expand('${HOST_PREFIX}') == "":
+       bb.fatal(d.expand("${PN}"), " NULL prefix")
    else:
-       prefix = bb.data.expand('${HOST_PREFIX}' , d)
-       distro = bb.data.expand('${DISTRO}', d)
-       target_sys = bb.data.expand('${TARGET_SYS}', d)
+       prefix = d.expand('${HOST_PREFIX}' )
+       distro = d.expand('${DISTRO}')
+       target_sys = d.expand('${TARGET_SYS}')
        float = d.getVar('TARGET_FPU') or "hard"
        archive_name = prefix + distro + "-" + target_sys + "-" + float
        if icc_is_kernel(bb, d):
            archive_name += "-kernel"
 
    import socket
-   ice_dir = bb.data.expand('${STAGING_DIR_NATIVE}${prefix_native}', d)
+   ice_dir = d.expand('${STAGING_DIR_NATIVE}${prefix_native}')
    tar_file = os.path.join(ice_dir, 'ice', archive_name + "-@VERSION@-" + socket.gethostname() + '.tar.gz')
 
    return tar_file
@@ -146,7 +146,7 @@ def icc_path(bb,d):
        return create_path( [get_cross_kernel_cc(bb,d), ], bb, d)
 
    else:
-       prefix = bb.data.expand('${HOST_PREFIX}', d)
+       prefix = d.expand('${HOST_PREFIX}')
        return create_path( [prefix+"gcc", prefix+"g++"], bb, d)
 
 def icc_get_tool(bb, d, tool):
@@ -155,8 +155,8 @@ def icc_get_tool(bb, d, tool):
    elif icc_is_kernel(bb, d):
        return os.popen("which %s" % get_cross_kernel_cc(bb, d)).read()[:-1]
    else:
-       ice_dir = bb.data.expand('${STAGING_BINDIR_TOOLCHAIN}', d)
-       target_sys = bb.data.expand('${TARGET_SYS}', d)
+       ice_dir = d.expand('${STAGING_BINDIR_TOOLCHAIN}')
+       target_sys = d.expand('${TARGET_SYS}')
        return os.path.join(ice_dir, "%s-%s" % (target_sys, tool))
 
 set_icecc_env() {
@@ -54,7 +54,7 @@ python() {
        deps = (d.getVarFlag('do_setscene', 'depends') or "").split()
        deps.append('strace-native:do_populate_sysroot')
        d.setVarFlag('do_setscene', 'depends', " ".join(deps))
-       logdir = bb.data.expand("${TRACE_LOGDIR}", d)
+       logdir = d.expand("${TRACE_LOGDIR}")
        bb.utils.mkdirhier(logdir)
    else:
        d.setVar('STRACEFUNC', '')
@@ -673,7 +673,7 @@ python do_package_qa () {
 python do_qa_staging() {
     bb.note("QA checking staging")
 
-    if not package_qa_check_staged(bb.data.expand('${SYSROOT_DESTDIR}/${STAGING_LIBDIR}',d), d):
+    if not package_qa_check_staged(d.expand('${SYSROOT_DESTDIR}/${STAGING_LIBDIR}'), d):
         bb.fatal("QA staging was broken by the package built above")
 }
 
@@ -244,7 +244,7 @@ python do_kernel_configcheck() {
    bb.plain("NOTE: validating kernel configuration")
 
    pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH', True), "${S}/scripts/util/")
-   cmd = bb.data.expand("cd ${B}/..; kconf_check -config- ${B} ${S} ${B} ${KBRANCH}",d )
+   cmd = d.expand("cd ${B}/..; kconf_check -config- ${B} ${S} ${B} ${KBRANCH}")
    ret, result = commands.getstatusoutput("%s%s" % (pathprefix, cmd))
 
    bb.plain( "%s" % result )
@@ -195,10 +195,10 @@ def splitfile(file, debugfile, debugsrcdir, d):
     dvar = d.getVar('PKGD', True)
     pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
     objcopy = d.getVar("OBJCOPY", True)
-    debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
+    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
     workdir = d.getVar("WORKDIR", True)
     workparentdir = os.path.dirname(workdir)
-    sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
+    sourcefile = d.expand("${WORKDIR}/debugsources.list")
 
     # We ignore kernel modules, we don't generate debug info files.
     if file.find("/lib/modules/") != -1 and file.endswith(".ko"):
@@ -238,11 +238,11 @@ def splitfile2(debugsrcdir, d):
     pathprefix = "export PATH=%s; " % d.getVar('PATH', True)
     strip = d.getVar("STRIP", True)
     objcopy = d.getVar("OBJCOPY", True)
-    debugedit = bb.data.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit", d)
+    debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit")
     workdir = d.getVar("WORKDIR", True)
     workparentdir = os.path.dirname(workdir)
     workbasedir = os.path.basename(workdir)
-    sourcefile = bb.data.expand("${WORKDIR}/debugsources.list", d)
+    sourcefile = d.expand("${WORKDIR}/debugsources.list")
 
     if debugsrcdir:
         nosuchdir = []
@@ -624,7 +624,7 @@ python fixup_perms () {
            if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                bb.error("Fixup perms: %s invalid line: %s" % (conf, line))
                continue
-           entry = fs_perms_entry(bb.data.expand(line, d))
+           entry = fs_perms_entry(d.expand(line))
            if entry and entry.path:
                fs_perms_table[entry.path] = entry
            f.close()
@@ -1071,9 +1071,9 @@ python emit_pkgdata() {
    pkgdatadir = d.getVar('PKGDESTWORK', True)
 
    # Take shared lock since we're only reading, not writing
-   lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d), True)
+   lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
 
-   data_file = pkgdatadir + bb.data.expand("/${PN}" , d)
+   data_file = pkgdatadir + d.expand("/${PN}" )
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    f.close()
@@ -1154,7 +1154,7 @@ python package_do_filedeps() {
    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar('PACKAGES', True)
 
-   rpmdeps = bb.data.expand("${RPMDEPS}", d)
+   rpmdeps = d.expand("${RPMDEPS}")
    r = re.compile(r'[<>=]+ +[^ ]*')
 
    # Quick routine to process the results of the rpmdeps call...
@@ -1253,7 +1253,7 @@ python package_do_shlibs() {
    shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
 
    # Take shared lock since we're only reading, not writing
-   lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d))
+   lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
 
    def linux_so(root, path, file):
        cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(os.path.join(root, file)) + " 2>/dev/null"
@@ -1499,7 +1499,7 @@ python package_do_pkgconfig () {
                if m:
                    name = m.group(1)
                    val = m.group(2)
-                   pd.setVar(name, bb.data.expand(val, pd))
+                   pd.setVar(name, pd.expand(val))
                    continue
                m = field_re.match(l)
                if m:
@@ -1509,7 +1509,7 @@ python package_do_pkgconfig () {
                    pkgconfig_needed[pkg] += exp.replace(',', ' ').split()
 
    # Take shared lock since we're only reading, not writing
-   lf = bb.utils.lockfile(bb.data.expand("${PACKAGELOCK}", d))
+   lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
 
    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
@@ -1560,7 +1560,7 @@ python read_shlibdeps () {
        rdepends = bb.utils.explode_dep_versions(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "")
 
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
-           depsfile = bb.data.expand("${PKGDEST}/" + pkg + extension, d)
+           depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                fd = file(depsfile)
                lines = fd.readlines()
@@ -20,7 +20,7 @@ python do_package_deb_install () {
     pkgfn = d.getVar('PKGFN', True)
     rootfs = d.getVar('IMAGE_ROOTFS', True)
     debdir = d.getVar('DEPLOY_DIR_DEB', True)
-    apt_config = bb.data.expand('${STAGING_ETCDIR_NATIVE}/apt/apt.conf', d)
+    apt_config = d.expand('${STAGING_ETCDIR_NATIVE}/apt/apt.conf')
     stagingbindir = d.getVar('STAGING_BINDIR_NATIVE', True)
     tmpdir = d.getVar('TMPDIR', True)
 
@@ -998,9 +998,9 @@ python do_package_rpm () {
        d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch)
    else:
        d.setVar('PACKAGE_ARCH_EXTEND', package_arch)
-   pkgwritedir = bb.data.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}', d)
-   pkgarch = bb.data.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}', d)
-   magicfile = bb.data.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc', d)
+   pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}')
+   pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${TARGET_VENDOR}-${TARGET_OS}')
+   magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc')
    bb.mkdirhier(pkgwritedir)
    os.chmod(pkgwritedir, 0755)
 
@@ -4,7 +4,7 @@ IMAGE_PKGTYPE ?= "tar"
 
 python package_tar_fn () {
     fn = os.path.join(d.getVar('DEPLOY_DIR_TAR'), "%s-%s-%s.tar.gz" % (d.getVar('PKG'), d.getVar('PKGV'), d.getVar('PKGR')))
-    fn = bb.data.expand(fn, d)
+    fn = d.expand(fn)
     d.setVar('PKGFN', fn)
 }
 
@@ -68,7 +68,7 @@ python do_package_tar () {
        overrides = localdata.getVar('OVERRIDES')
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')
-       overrides = bb.data.expand(overrides, localdata)
+       overrides = localdata.expand(overrides)
        localdata.setVar('OVERRIDES', '%s:%s' % (overrides, pkg))
 
        bb.data.update_data(localdata)
@@ -66,7 +66,7 @@ def can_use_autotools_base(cfgdata, d):
 def can_remove_FILESPATH(cfgdata, d):
     expected = cfgdata.get("FILESPATH")
     #expected = "${@':'.join([os.path.normpath(os.path.join(fp, p, o)) for fp in d.getVar('FILESPATHBASE', True).split(':') for p in d.getVar('FILESPATHPKG', True).split(':') for o in (d.getVar('OVERRIDES', True) + ':').split(':') if os.path.exists(os.path.join(fp, p, o))])}:${FILESDIR}"
-    expectedpaths = bb.data.expand(expected, d)
+    expectedpaths = d.expand(expected)
     unexpanded = d.getVar("FILESPATH", 0)
     filespath = d.getVar("FILESPATH", True).split(":")
     filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)]
@@ -91,7 +91,7 @@ def can_remove_FILESDIR(cfgdata, d):
     return unexpanded != expected and \
            os.path.exists(expanded) and \
           (expanded in filespath or
-           expanded == bb.data.expand(expected, d))
+           expanded == d.expand(expected))
 
 def can_remove_others(p, cfgdata, d):
     for k in ["S", "PV", "PN", "DESCRIPTION", "LICENSE", "DEPENDS",
@@ -104,7 +104,7 @@ def can_remove_others(p, cfgdata, d):
 
        try:
            expanded = d.getVar(k, True)
-           cfgexpanded = bb.data.expand(cfgunexpanded, d)
+           cfgexpanded = d.expand(cfgunexpanded)
        except bb.fetch.ParameterError:
            continue
 
@@ -7,9 +7,9 @@ def process_dir (directory, d):
     import subprocess as sub
     import stat
 
-    cmd = bb.data.expand('${CHRPATH_BIN}', d)
+    cmd = d.expand('${CHRPATH_BIN}')
     tmpdir = d.getVar('TMPDIR')
-    basedir = bb.data.expand('${base_prefix}', d)
+    basedir = d.expand('${base_prefix}')
 
     #bb.debug("Checking %s for binaries to process" % directory)
     if not os.path.exists(directory):
@@ -82,7 +82,7 @@ def process_dir (directory, d):
                os.chmod(fpath, perms)
 
 def rpath_replace (path, d):
-    bindirs = bb.data.expand("${bindir} ${sbindir} ${base_sbindir} ${base_bindir} ${libdir} ${base_libdir} ${libexecdir} ${PREPROCESS_RELOCATE_DIRS}", d).split()
+    bindirs = d.expand("${bindir} ${sbindir} ${base_sbindir} ${base_bindir} ${libdir} ${base_libdir} ${libexecdir} ${PREPROCESS_RELOCATE_DIRS}").split()
 
     for bindir in bindirs:
        #bb.note ("Processing directory " + bindir)
@@ -90,5 +90,5 @@ def rpath_replace (path, d):
        process_dir (directory, d)
 
 python relocatable_binaries_preprocess() {
    rpath_replace(d.expand('${SYSROOT_DESTDIR}'), d)
 }
@@ -13,12 +13,12 @@ def raise_sanity_error(msg):
 
 def check_conf_exists(fn, data):
     bbpath = []
-    fn = bb.data.expand(fn, data)
+    fn = data.expand(fn)
     vbbpath = data.getVar("BBPATH")
     if vbbpath:
         bbpath += vbbpath.split(":")
     for p in bbpath:
-        currname = os.path.join(bb.data.expand(p, data), fn)
+        currname = os.path.join(data.expand(p), fn)
         if os.access(currname, os.R_OK):
             return True
     return False
@@ -411,16 +411,16 @@ def check_sanity(e):
            f.write(current_abi)
        elif abi == "2" and current_abi == "3":
            bb.note("Converting staging from layout version 2 to layout version 3")
-           os.system(bb.data.expand("mv ${TMPDIR}/staging ${TMPDIR}/sysroots", e.data))
-           os.system(bb.data.expand("ln -s sysroots ${TMPDIR}/staging", e.data))
-           os.system(bb.data.expand("cd ${TMPDIR}/stamps; for i in */*do_populate_staging; do new=`echo $i | sed -e 's/do_populate_staging/do_populate_sysroot/'`; mv $i $new; done", e.data))
+           os.system(e.data.expand("mv ${TMPDIR}/staging ${TMPDIR}/sysroots"))
+           os.system(e.data.expand("ln -s sysroots ${TMPDIR}/staging"))
+           os.system(e.data.expand("cd ${TMPDIR}/stamps; for i in */*do_populate_staging; do new=`echo $i | sed -e 's/do_populate_staging/do_populate_sysroot/'`; mv $i $new; done"))
            f = file(abifile, "w")
            f.write(current_abi)
        elif abi == "3" and current_abi == "4":
            bb.note("Converting staging layout from version 3 to layout version 4")
-           if os.path.exists(bb.data.expand("${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS}", e.data)):
-               os.system(bb.data.expand("mv ${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS} ${STAGING_BINDIR_CROSS}", e.data))
-               os.system(bb.data.expand("ln -s ${STAGING_BINDIR_CROSS} ${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS}", e.data))
+           if os.path.exists(e.data.expand("${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS}")):
+               os.system(e.data.expand("mv ${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS} ${STAGING_BINDIR_CROSS}"))
+               os.system(e.data.expand("ln -s ${STAGING_BINDIR_CROSS} ${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS}"))
 
            f = file(abifile, "w")
            f.write(current_abi)
@@ -428,7 +428,7 @@ def check_sanity(e):
            messages = messages + "Staging layout has changed. The cross directory has been deprecated and cross packages are now built under the native sysroot.\nThis requires a rebuild.\n"
        elif abi == "5" and current_abi == "6":
            bb.note("Converting staging layout from version 5 to layout version 6")
-           os.system(bb.data.expand("mv ${TMPDIR}/pstagelogs ${SSTATE_MANIFESTS}", e.data))
+           os.system(e.data.expand("mv ${TMPDIR}/pstagelogs ${SSTATE_MANIFESTS}"))
            f = file(abifile, "w")
            f.write(current_abi)
        elif abi == "7" and current_abi == "8":
@@ -17,7 +17,7 @@ def get_src_tree(d):
        return
 
    s_tree_raw = s.split('/')[1]
-   s_tree = bb.data.expand(s_tree_raw, d)
+   s_tree = d.expand(s_tree_raw)
 
    src_tree_path = os.path.join(workdir, s_tree)
    try:
@@ -59,7 +59,7 @@ python sourcepkg_do_dumpdata() {
    distro = d.getVar('DISTRO', True)
    s_tree = get_src_tree(d)
    openembeddeddir = os.path.join(workdir, s_tree, distro)
-   dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))
+   dumpfile = os.path.join(openembeddeddir, d.expand("${P}-${PR}.showdata.dump"))
 
    try:
        os.mkdir(openembeddeddir)
@@ -24,16 +24,16 @@ python () {
    if bb.data.inherits_class('native', d):
        d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'))
    elif bb.data.inherits_class('cross', d):
-       d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${TUNE_PKGARCH}", d))
-       d.setVar('SSTATE_MANMACH', bb.data.expand("${BUILD_ARCH}_${MACHINE}", d))
+       d.setVar('SSTATE_PKGARCH', d.expand("${BUILD_ARCH}_${TUNE_PKGARCH}"))
+       d.setVar('SSTATE_MANMACH', d.expand("${BUILD_ARCH}_${MACHINE}"))
    elif bb.data.inherits_class('crosssdk', d):
-       d.setVar('SSTATE_PKGARCH', bb.data.expand("${BUILD_ARCH}_${PACKAGE_ARCH}", d))
+       d.setVar('SSTATE_PKGARCH', d.expand("${BUILD_ARCH}_${PACKAGE_ARCH}"))
    elif bb.data.inherits_class('nativesdk', d):
-       d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}", d))
+       d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}"))
    elif bb.data.inherits_class('cross-canadian', d):
-       d.setVar('SSTATE_PKGARCH', bb.data.expand("${SDK_ARCH}_${PACKAGE_ARCH}", d))
+       d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${PACKAGE_ARCH}"))
    else:
-       d.setVar('SSTATE_MANMACH', bb.data.expand("${MACHINE}", d))
+       d.setVar('SSTATE_MANMACH', d.expand("${MACHINE}"))
 
    # These classes encode staging paths into their scripts data so can only be
    # reused if we manipulate the paths
@@ -97,8 +97,8 @@ def sstate_install(ss, d):
 
    sharedfiles = []
    shareddirs = []
-   bb.mkdirhier(bb.data.expand("${SSTATE_MANIFESTS}", d))
-   manifest = bb.data.expand("${SSTATE_MANFILEPREFIX}.%s" % ss['name'], d)
+   bb.mkdirhier(d.expand("${SSTATE_MANIFESTS}"))
+   manifest = d.expand("${SSTATE_MANFILEPREFIX}.%s" % ss['name'])
 
    if os.access(manifest, os.R_OK):
        bb.fatal("Package already staged (%s)?!" % manifest)
@@ -153,7 +153,7 @@ def sstate_installpkg(ss, d):
        bb.mkdirhier(dir)
        oe.path.remove(dir)
 
-   sstateinst = bb.data.expand("${WORKDIR}/sstate-install-%s/" % ss['name'], d)
+   sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['name'])
    sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['name'] + ".tgz"
 
    if not os.path.exists(sstatepkg):
@@ -246,7 +246,7 @@ def sstate_clean_manifest(manifest, d):
 def sstate_clean(ss, d):
    import oe.path
 
-   manifest = bb.data.expand("${SSTATE_MANFILEPREFIX}.%s" % ss['name'], d)
+   manifest = d.expand("${SSTATE_MANFILEPREFIX}.%s" % ss['name'])
 
    if os.path.exists(manifest):
        locks = []
@@ -351,7 +351,7 @@ def sstate_package(ss, d):
 
    tmpdir = d.getVar('TMPDIR', True)
 
-   sstatebuild = bb.data.expand("${WORKDIR}/sstate-build-%s/" % ss['name'], d)
+   sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['name'])
    sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['name'] + ".tgz"
    bb.mkdirhier(sstatebuild)
    bb.mkdirhier(os.path.dirname(sstatepkg))
@@ -397,7 +397,7 @@ def pstaging_fetch(sstatepkg, d):
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
 
-   dldir = bb.data.expand("${SSTATE_DIR}", localdata)
+   dldir = localdata.expand("${SSTATE_DIR}")
    srcuri = "file://" + os.path.basename(sstatepkg)
 
    bb.mkdirhier(dldir)
@@ -484,7 +484,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
        }
 
    for task in range(len(sq_fn)):
-       sstatefile = bb.data.expand("${SSTATE_DIR}/" + sq_hashfn[task] + "_" + mapping[sq_task[task]] + ".tgz", d)
+       sstatefile = d.expand("${SSTATE_DIR}/" + sq_hashfn[task] + "_" + mapping[sq_task[task]] + ".tgz")
        sstatefile = sstatefile.replace("${BB_TASKHASH}", sq_hash[task])
        if os.path.exists(sstatefile):
            bb.debug(2, "SState: Found valid sstate file %s" % sstatefile)
@@ -499,7 +499,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
 
-   dldir = bb.data.expand("${SSTATE_DIR}", localdata)
+   dldir = localdata.expand("${SSTATE_DIR}")
    localdata.setVar('DL_DIR', dldir)
    localdata.setVar('PREMIRRORS', mirrors)
 
@@ -509,7 +509,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
        if task in ret:
            continue
 
-       sstatefile = bb.data.expand("${SSTATE_DIR}/" + sq_hashfn[task] + "_" + mapping[sq_task[task]] + ".tgz", d)
+       sstatefile = d.expand("${SSTATE_DIR}/" + sq_hashfn[task] + "_" + mapping[sq_task[task]] + ".tgz")
        sstatefile = sstatefile.replace("${BB_TASKHASH}", sq_hash[task])
 
        srcuri = "file://" + os.path.basename(sstatefile)
@@ -95,7 +95,7 @@ python build_syslinux_menu () {
        overrides = localdata.getVar('OVERRIDES')
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')
-       overrides = bb.data.expand(overrides, localdata)
+       overrides = localdata.expand(overrides)
 
        localdata.setVar('OVERRIDES', label + ':' + overrides)
        bb.data.update_data(localdata)
@@ -16,7 +16,7 @@ addtask clean
 do_clean[nostamp] = "1"
 python do_clean() {
    """clear the build and temp directories"""
-   dir = bb.data.expand("${WORKDIR}", d)
+   dir = d.expand("${WORKDIR}")
    bb.note("Removing " + dir)
    oe.path.remove(dir)
 
@@ -39,7 +39,7 @@ def csl_get_gcc_version(d):
 
 def csl_get_libc_version(d):
    import os,bb
-   syspath = bb.data.expand('${EXTERNAL_TOOLCHAIN}/${CSL_TARGET_SYS}', d)
+   syspath = d.expand('${EXTERNAL_TOOLCHAIN}/${CSL_TARGET_SYS}')
    if not syspath:
        return 'UNKNOWN'
 
@@ -55,7 +55,7 @@ def csl_get_libc_version(d):
 
 def csl_get_kernel_version(d):
    import os,bb
-   syspath = bb.data.expand('${EXTERNAL_TOOLCHAIN}/${CSL_TARGET_SYS}', d)
+   syspath = d.expand('${EXTERNAL_TOOLCHAIN}/${CSL_TARGET_SYS}')
    if not syspath:
        return 'UNKNOWN'
 
@@ -92,7 +92,7 @@ def populate_toolchain_links(d):
    d = d.createCopy()
    d.finalize()
 
-   pattern = bb.data.expand('${EXTERNAL_TOOLCHAIN}/bin/${TARGET_PREFIX}*', d)
+   pattern = d.expand('${EXTERNAL_TOOLCHAIN}/bin/${TARGET_PREFIX}*')
    files = glob(pattern)
    if not files:
        bb.fatal("Unable to populate toolchain binary symlinks")
@@ -7,7 +7,7 @@ def typed_value(key, d):
    var_type = d.getVarFlag(key, 'type')
    flags = d.getVarFlags(key)
    if flags is not None:
-       flags = dict((flag, bb.data.expand(value, d))
+       flags = dict((flag, d.expand(value))
                     for flag, value in flags.iteritems())
    else:
        flags = {}
@@ -26,15 +26,15 @@ def read_pkgdatafile(fn):
    return pkgdata
 
 def get_subpkgedata_fn(pkg, d):
-   archs = bb.data.expand("${PACKAGE_ARCHS}", d).split(" ")
+   archs = d.expand("${PACKAGE_ARCHS}").split(" ")
    archs.reverse()
-   pkgdata = bb.data.expand('${TMPDIR}/pkgdata/', d)
-   targetdir = bb.data.expand('${TARGET_VENDOR}-${TARGET_OS}/runtime/', d)
+   pkgdata = d.expand('${TMPDIR}/pkgdata/')
+   targetdir = d.expand('${TARGET_VENDOR}-${TARGET_OS}/runtime/')
    for arch in archs:
        fn = pkgdata + arch + targetdir + pkg
        if os.path.exists(fn):
            return fn
-   return bb.data.expand('${PKGDATA_DIR}/runtime/%s' % pkg, d)
+   return d.expand('${PKGDATA_DIR}/runtime/%s' % pkg)
 
 def has_subpkgdata(pkg, d):
    return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)
@@ -43,11 +43,11 @@ def read_subpkgdata(pkg, d):
    return read_pkgdatafile(get_subpkgedata_fn(pkg, d))
 
 def has_pkgdata(pn, d):
-   fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
+   fn = d.expand('${PKGDATA_DIR}/%s' % pn)
    return os.access(fn, os.R_OK)
 
 def read_pkgdata(pn, d):
-   fn = bb.data.expand('${PKGDATA_DIR}/%s' % pn, d)
+   fn = d.expand('${PKGDATA_DIR}/%s' % pn)
    return read_pkgdatafile(fn)
 
 #
@@ -75,8 +75,8 @@ python populate_packages_prepend() {
    packages = []
    multilib_prefix = (d.getVar("MLPREFIX", True) or "")
    hook = lambda file,pkg,b,c,d:packages.append((file,pkg))
-   plugin_dir = bb.data.expand('${libdir}/connman/plugins/', d)
-   plugin_name = bb.data.expand('${PN}-plugin-%s', d)
+   plugin_dir = d.expand('${libdir}/connman/plugins/')
+   plugin_name = d.expand('${PN}-plugin-%s')
    do_split_packages(d, plugin_dir, '^(.*).so$', plugin_name, '${PN} plugin for %s', extra_depends='', hook=hook )
    for (file, package) in packages:
        plugintype = package.split( '-' )[-1]
@@ -63,10 +63,10 @@ python populate_packages_prepend() {
    # time before the files from the package may be available, hence
    # storing the data from the files in the preinst directly.
 
-   f = open(bb.data.expand("${STAGING_DATADIR}/base-passwd/passwd.master", d), 'r')
+   f = open(d.expand("${STAGING_DATADIR}/base-passwd/passwd.master"), 'r')
    passwd = "".join(f.readlines())
    f.close()
-   f = open(bb.data.expand("${STAGING_DATADIR}/base-passwd/group.master", d), 'r')
+   f = open(d.expand("${STAGING_DATADIR}/base-passwd/group.master"), 'r')
    group = "".join(f.readlines())
    f.close()
 
@@ -82,7 +82,7 @@ python () {
    if "${OE_FEATURES}":
        d.setVar('configmangle_append',
                 "/^### DISTRO FEATURES$/a\\\n%s\n\n" %
-                ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))))
+                ("\\n".join((d.expand("${OE_FEATURES}").split("\n")))))
    d.setVar('configmangle_append',
             "/^### CROSS$/a\\\n%s\n" %
             ("\\n".join(["CONFIG_CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"",
@@ -178,8 +178,8 @@ do_install() {
 }
 
 python populate_packages_prepend () {
-   base_libdir = bb.data.expand("${base_libdir}", d)
-   pnbase = bb.data.expand("${PN}-lib%s", d)
+   base_libdir = d.expand("${base_libdir}")
+   pnbase = d.expand("${PN}-lib%s")
    do_split_packages(d, base_libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends = '', allow_links=True)
 }
 
@@ -142,7 +142,7 @@ python () {
    if "${OE_FEATURES}":
        d.setVar('configmangle_append',
                 "/^### DISTRO FEATURES$/a\\\n%s\n\n" %
-                ("\\n".join((bb.data.expand("${OE_FEATURES}", d).split("\n")))))
+                ("\\n".join((d.expand("${OE_FEATURES}").split("\n")))))
    d.setVar('configmangle_append',
             "/^### CROSS$/a\\\n%s\n" %
             ("\\n".join(["CROSS_COMPILER_PREFIX=\"${TARGET_PREFIX}\"",
@@ -18,7 +18,7 @@ python do_install_config () {
    data = infile.read()
    infile.close()
 
-   data = bb.data.expand(data, d)
+   data = d.expand(data)
 
    outdir = os.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt')
    if not os.path.exists(outdir):
@@ -79,7 +79,7 @@ do_clean[lockfiles] = "${SW}.clean.lock"
 
 python workshared_clean () {
    """clear the source directory"""
-   dir = bb.data.expand("${SW}", d)
+   dir = d.expand("${SW}")
    bb.note("Removing " + dir)
    oe.path.remove(dir)
 
@@ -1,7 +1,7 @@
 INHIBIT_PACKAGE_STRIP = "1"
 
 # Compute how to get from libexecdir to bindir in python (easier than shell)
-BINRELPATH = "${@oe.path.relative(bb.data.expand("${libexecdir}/gcc/${TARGET_SYS}/${BINV}", d), bb.data.expand("${STAGING_DIR_NATIVE}${prefix_native}/bin/${MULTIMACH_TARGET_SYS}", d))}"
+BINRELPATH = "${@oe.path.relative(d.expand("${libexecdir}/gcc/${TARGET_SYS}/${BINV}"), d.expand("${STAGING_DIR_NATIVE}${prefix_native}/bin/${MULTIMACH_TARGET_SYS}"))}"
 
 do_install () {
    oe_runmake 'DESTDIR=${D}' install-host
@@ -290,7 +290,7 @@ PACKAGES_append = " perl-modules "
 RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', True).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"
 
 python populate_packages_prepend () {
-   libdir = bb.data.expand('${libdir}/perl/${PV}', d)
+   libdir = d.expand('${libdir}/perl/${PV}')
    do_split_packages(d, libdir, 'auto/(Encode/.[^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, 'auto/([^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, 'Module/([^\/]*).*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
@@ -58,6 +58,6 @@ CONFFILES_${PN} = "${sysconfdir}/lighttpd.conf"
 PACKAGES_DYNAMIC = "lighttpd-module-*"
 
 python populate_packages_prepend () {
-   lighttpd_libdir = bb.data.expand('${libdir}', d)
+   lighttpd_libdir = d.expand('${libdir}')
    do_split_packages(d, lighttpd_libdir, '^mod_(.*)\.so$', 'lighttpd-module-%s', 'Lighttpd module for %s', extra_depends='')
 }
@@ -61,9 +61,9 @@ python populate_packages_prepend () {
        d.setVar('FILES_' + pn, nf)
 
    dvar = bb.data.expand('${WORKDIR}/package', d, True)
-   pam_libdir = bb.data.expand('${base_libdir}/security', d)
-   pam_sbindir = bb.data.expand('${sbindir}', d)
-   pam_filterdir = bb.data.expand('${base_libdir}/security/pam_filter', d)
+   pam_libdir = d.expand('${base_libdir}/security')
+   pam_sbindir = d.expand('${sbindir}')
+   pam_filterdir = d.expand('${base_libdir}/security/pam_filter')
 
    do_split_packages(d, pam_libdir, '^pam(.*)\.so$', 'pam-plugin%s', 'PAM plugin for %s', extra_depends='')
    pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_chkpwd')
@@ -60,7 +60,7 @@ PACKAGES_DYNAMIC_virtclass-native = ""
 python populate_packages_prepend () {
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
 
-   loaders_root = bb.data.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders', d)
+   loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')
 
    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
 }
@@ -37,7 +37,7 @@ python populate_packages_prepend () {
    prologue = d.getVar("postinst_prologue", True)
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
 
-   gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
+   gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    loaders_root = os.path.join(gtk_libdir, 'loaders')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends');
@@ -37,7 +37,7 @@ python populate_packages_prepend () {
    prologue = d.getVar("postinst_prologue", True)
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)
 
-   gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
+   gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    loaders_root = os.path.join(gtk_libdir, 'loaders')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends');
@@ -43,7 +43,7 @@ python populate_packages_prepend () {
 
    prologue = d.getVar("postinst_prologue", True)
 
-   gtk_libdir = bb.data.expand('${libdir}/gtk-2.0/${LIBV}', d)
+   gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends');
 
@@ -36,7 +36,7 @@ EXTRA_OECONF = "\
 #
 #python populate_packages_prepend () {
 #   import os.path
-#   inputdrivers_libdir = bb.data.expand('${libdir}/directfb-${RV}/inputdrivers', d)
+#   inputdrivers_libdir = d.expand('${libdir}/directfb-${RV}/inputdrivers')
 #   do_split_packages(d, inputdrivers_libdir, '*.so$', 'directfb-inputdrivers-%s', 'Directfb plugin for %s')
 #}
 
@@ -52,7 +52,7 @@ fi
 python populate_packages_prepend () {
    prologue = d.getVar("postinst_prologue", True)
 
-   modules_root = bb.data.expand('${libdir}/pango/${LIBV}/modules', d)
+   modules_root = d.expand('${libdir}/pango/${LIBV}/modules')
 
    do_split_packages(d, modules_root, '^pango-(.*)\.so$', 'pango-module-%s', 'Pango module %s', prologue + 'pango-querymodules > /etc/pango/pango.modules')
 }
@@ -1,14 +1,14 @@
 LIBV = "0.10"
 
 python populate_packages_prepend () {
-   gst_libdir = bb.data.expand('${libdir}/gstreamer-${LIBV}', d)
+   gst_libdir = d.expand('${libdir}/gstreamer-${LIBV}')
    postinst = d.getVar('plugin_postinst', True)
    glibdir = d.getVar('libdir', True)
 
    do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
-   do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', bb.data.expand('${PN}-%s', d), 'GStreamer plugin for %s', postinst=postinst, extra_depends=bb.data.expand('${PN}',d))
-   do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', bb.data.expand('${PN}-%s-dev', d), 'GStreamer plugin for %s (development files)', extra_depends=bb.data.expand('${PN}-dev',d))
-   do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', bb.data.expand('${PN}-%s-staticdev', d), 'GStreamer plugin for %s (static development files)', extra_depends=bb.data.expand('${PN}-staticdev',d))
+   do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', d.expand('${PN}-%s'), 'GStreamer plugin for %s', postinst=postinst, extra_depends=d.expand('${PN}'))
+   do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', d.expand('${PN}-%s-dev'), 'GStreamer plugin for %s (development files)', extra_depends=d.expand('${PN}-dev'))
+   do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', d.expand('${PN}-%s-staticdev'), 'GStreamer plugin for %s (static development files)', extra_depends=d.expand('${PN}-staticdev'))
 
    pn = d.getVar('PN', True)
    metapkg = pn + '-meta'
@@ -106,7 +106,7 @@ pkg_postinst_${PN}-server() {
 python populate_packages_prepend() {
    #d.setVar('PKG_pulseaudio', 'pulseaudio')
 
-   plugindir = bb.data.expand('${libdir}/pulse-${PV}/modules/', d)
+   plugindir = d.expand('${libdir}/pulse-${PV}/modules/')
    do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='' )
    do_split_packages(d, plugindir, '^lib(.*)\.so$', 'pulseaudio-lib-%s', 'PulseAudio library for %s', extra_depends='' )
 }
@@ -31,8 +31,8 @@ PACKAGES_DYNAMIC = "qmmp-plugin-* "
 
 python populate_packages_prepend () {
    import os
-   qmmp_libdir = bb.data.expand('${libdir}/qmmp', d)
-   gd = bb.data.expand('${D}/${libdir}/qmmp', d)
+   qmmp_libdir = d.expand('${libdir}/qmmp')
+   gd = d.expand('${D}/${libdir}/qmmp')
    plug_dirs = os.listdir(gd)
 
    for plug_dir in plug_dirs:
@@ -257,17 +257,17 @@ do_compile() {
 }
 
 python populate_packages_prepend() {
-   translation_dir = bb.data.expand('${datadir}/${QT_DIR_NAME}/translations/', d)
-   translation_name = bb.data.expand('${QT_BASE_NAME}-translation-%s', d)
+   translation_dir = d.expand('${datadir}/${QT_DIR_NAME}/translations/')
+   translation_name = d.expand('${QT_BASE_NAME}-translation-%s')
    do_split_packages(d, translation_dir, '^(assistant|designer|linguist|qt|qtconfig|qvfb)_(.*)\.qm$', translation_name, '${PN} translation for %s', extra_depends='' )
 
-   phrasebook_dir = bb.data.expand('${datadir}/${QT_DIR_NAME}/phrasebooks/', d)
-   phrasebook_name = bb.data.expand('${QT_BASE_NAME}-phrasebook-%s', d)
+   phrasebook_dir = d.expand('${datadir}/${QT_DIR_NAME}/phrasebooks/')
+   phrasebook_name = d.expand('${QT_BASE_NAME}-phrasebook-%s')
    import os;
-   if os.path.exists("%s%s" % (bb.data.expand('${D}',d), phrasebook_dir)):
+   if os.path.exists("%s%s" % (d.expand('${D}'), phrasebook_dir)):
        do_split_packages(d, phrasebook_dir, '^(.*)\.qph$', phrasebook_name, '${PN} phrasebook for %s', extra_depends='' )
    else:
-       bb.note("The path does not exist:", bb.data.expand('${D}', d), phrasebook_dir)
+       bb.note("The path does not exist:", d.expand('${D}'), phrasebook_dir)
 
    # Package all the plugins and their -dbg version and create a meta package
    def qtopia_split(path, name, glob):
@@ -275,17 +275,17 @@ python populate_packages_prepend() {
        Split the package into a normal and -dbg package and then add the
        new packages to the meta package.
        """
-       plugin_dir = bb.data.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/' % path, d)
-       if not os.path.exists("%s%s" % (bb.data.expand('${D}',d), plugin_dir)):
-           bb.note("The path does not exist:", bb.data.expand('${D}', d), plugin_dir)
+       plugin_dir = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/' % path)
+       if not os.path.exists("%s%s" % (d.expand('${D}'), plugin_dir)):
+           bb.note("The path does not exist:", d.expand('${D}'), plugin_dir)
            return
 
-       plugin_name = bb.data.expand('${QT_BASE_NAME}-plugin-%s-%%s' % name, d)
+       plugin_name = d.expand('${QT_BASE_NAME}-plugin-%s-%%s' % name)
       dev_packages = []
       dev_hook = lambda file,pkg,b,c,d:dev_packages.append((file,pkg))
       do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook)
       # Create a -dbg package as well
-       plugin_dir_dbg = bb.data.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path, d)
+       plugin_dir_dbg = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path)
       packages = d.getVar('PACKAGES')
       for (file,package) in dev_packages:
           packages = "%s %s-dbg" % (packages, package)
@@ -46,7 +46,7 @@ do_compile () {
 }
 
 python populate_packages_prepend () {
-   pcre_libdir = bb.data.expand('${libdir}', d)
+   pcre_libdir = d.expand('${libdir}')
    do_split_packages(d, pcre_libdir, '^lib(.*)\.so\.+', 'lib%s', 'libpcre %s library', extra_depends='', allow_links=True)
 }
 