Convert tab indentation in Python functions to four-space indentation

(From OE-Core rev: 604d46c686d06d62d5a07b9c7f4fa170f99307d8)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Author: Richard Purdie
Date:   2012-07-11 17:33:43 +0000
commit bfd279de32 (parent 99203edda6)
71 changed files with 3585 additions and 3587 deletions
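
The rewrite itself is mechanical: each run of leading tabs inside a Python function becomes one four-space indent level per tab, which is why the additions and deletions are nearly symmetric. As an illustrative sketch only (this page does not record the script actually used; the retab helper below is hypothetical):

    # retab.py -- hypothetical sketch of a leading-tab to four-space rewrite,
    # assuming indentation is expressed as a run of leading tabs only.
    import re
    import sys

    def retab(line, width=4):
        # Replace the run of leading tabs with `width` spaces per tab;
        # embedded tabs (e.g. inside string literals) are left untouched.
        tabs = re.match(r"\t*", line).group(0)
        return " " * (width * len(tabs)) + line[len(tabs):]

    for line in sys.stdin:
        sys.stdout.write(retab(line))

Running something like `python retab.py < some.bbclass > some.bbclass.new` produces hunks of the shape shown below.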


@@ -1,23 +1,23 @@
def autotools_dep_prepend(d):
    if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True):
        return ''

    pn = d.getVar('PN', True)
    deps = ''

    if pn in ['autoconf-native', 'automake-native', 'help2man-native']:
        return deps
    deps += 'autoconf-native automake-native '

    if not pn in ['libtool', 'libtool-native'] and not pn.endswith("libtool-cross"):
        deps += 'libtool-native '
        if not bb.data.inherits_class('native', d) \
                and not bb.data.inherits_class('nativesdk', d) \
                and not bb.data.inherits_class('cross', d) \
                and not d.getVar('INHIBIT_DEFAULT_DEPS', True):
            deps += 'libtool-cross '

    return deps + 'gnu-config-native '

EXTRA_OEMAKE = ""
@@ -35,15 +35,15 @@ EXTRA_AUTORECONF = "--exclude=autopoint"
export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}"

def autotools_set_crosscompiling(d):
    if not bb.data.inherits_class('native', d):
        return " cross_compiling=yes"
    return ""

def append_libtool_sysroot(d):
    # Only supply libtool sysroot option for non-native packages
    if not bb.data.inherits_class('native', d):
        return '--with-libtool-sysroot=${STAGING_DIR_HOST}'
    return ""

# EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}"


@@ -33,7 +33,7 @@ def oe_import(d):
python oe_import_eh () {
    if isinstance(e, bb.event.ConfigParsed):
        oe_import(e.data)
}

addhandler oe_import_eh
@@ -50,21 +50,20 @@ oe_runmake() {
def base_dep_prepend(d):
    #
    # Ideally this will check a flag so we will operate properly in
    # the case where host == build == target, for now we don't work in
    # that case though.
    #
    deps = ""

    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class, not
    # the application.
    if not d.getVar('INHIBIT_DEFAULT_DEPS'):
        if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)):
            deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc "
    return deps

BASEDEPENDS = "${@base_dep_prepend(d)}"
@@ -80,61 +79,61 @@ do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
python base_do_fetch() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.download()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}

addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
do_unpack[cleandirs] = "${S}/patches"
python base_do_unpack() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    rootdir = localdata.getVar('WORKDIR', True)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.unpack(rootdir)
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}

GIT_CONFIG_PATH = "${STAGING_DIR_NATIVE}/etc"
GIT_CONFIG = "${GIT_CONFIG_PATH}/gitconfig"

def generate_git_config(e):
    from bb import data

    if data.getVar('GIT_CORE_CONFIG', e.data, True):
        gitconfig_path = e.data.getVar('GIT_CONFIG', True)
        proxy_command = "    gitProxy = %s\n" % data.getVar('OE_GIT_PROXY_COMMAND', e.data, True)

        bb.mkdirhier(e.data.expand("${GIT_CONFIG_PATH}"))
        if (os.path.exists(gitconfig_path)):
            os.remove(gitconfig_path)

        f = open(gitconfig_path, 'w')
        f.write("[core]\n")
        ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split()
        for ignore_host in ignore_hosts:
            f.write("    gitProxy = none for %s\n" % ignore_host)
        f.write(proxy_command)
        f.close

def pkgarch_mapping(d):
    # Compatibility mappings of TUNE_PKGARCH (opt in)
@@ -205,69 +204,69 @@ def preferred_ml_updates(d):
def get_layers_branch_rev(d):
    layers = (d.getVar("BBLAYERS", True) or "").split()
    layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    i = len(layers_branch_rev)-1
    p1 = layers_branch_rev[i].find("=")
    s1 = layers_branch_rev[i][p1:]
    while i > 0:
        p2 = layers_branch_rev[i-1].find("=")
        s2 = layers_branch_rev[i-1][p2:]
        if s1 == s2:
            layers_branch_rev[i-1] = layers_branch_rev[i-1][0:p2]
            i -= 1
        else:
            i -= 1
            p1 = layers_branch_rev[i].find("=")
            s1 = layers_branch_rev[i][p1:]

    return layers_branch_rev
BUILDCFG_FUNCS ??= "buildcfg_vars get_layers_branch_rev buildcfg_neededvars"
BUILDCFG_FUNCS[type] = "list"

def buildcfg_vars(d):
    statusvars = oe.data.typed_value('BUILDCFG_VARS', d)
    for var in statusvars:
        value = d.getVar(var, True)
        if value is not None:
            yield '%-17s = "%s"' % (var, value)

def buildcfg_neededvars(d):
    needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d)
    pesteruser = []
    for v in needed_vars:
        val = d.getVar(v, True)
        if not val or val == 'INVALID':
            pesteruser.append(v)

    if pesteruser:
        bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
addhandler base_eventhandler
python base_eventhandler() {
    if isinstance(e, bb.event.ConfigParsed):
        e.data.setVar('BB_VERSION', bb.__version__)
        generate_git_config(e)
        pkgarch_mapping(e.data)
        preferred_ml_updates(e.data)

    if isinstance(e, bb.event.BuildStarted):
        statuslines = []
        for func in oe.data.typed_value('BUILDCFG_FUNCS', e.data):
            g = globals()
            if func not in g:
                bb.warn("Build configuration function '%s' does not exist" % func)
            else:
                flines = g[func](e.data)
                if flines:
                    statuslines.extend(flines)

        statusheader = e.data.getVar('BUILDCFG_HEADER', True)
        bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))
}

addtask configure after do_patch
@@ -546,18 +545,18 @@ python do_cleansstate() {
addtask cleanall after do_cleansstate
python do_cleanall() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.clean()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}
do_cleanall[nostamp] = "1"


@@ -92,8 +92,8 @@ build_boot_dd() {
}

python do_bootdirectdisk() {
    bb.build.exec_func('build_syslinux_cfg', d)
    bb.build.exec_func('build_boot_dd', d)
}

addtask bootdirectdisk before do_build


@@ -42,15 +42,15 @@ EFI_CLASS = "${@base_contains("MACHINE_FEATURES", "efi", "grub-efi", "dummy", d)
# contain "efi". This way legacy is supported by default if neither is
# specified, maintaining the original behavior.
def pcbios(d):
    pcbios = base_contains("MACHINE_FEATURES", "pcbios", "1", "0", d)
    if pcbios == "0":
        pcbios = base_contains("MACHINE_FEATURES", "efi", "0", "1", d)
    return pcbios

def pcbios_class(d):
    if d.getVar("PCBIOS", True) == "1":
        return "syslinux"
    return "dummy"

PCBIOS = "${@pcbios(d)}"
PCBIOS_CLASS = "${@pcbios_class(d)}"
@@ -181,12 +181,12 @@ build_hddimg() {
}

python do_bootimg() {
    if d.getVar("PCBIOS", True) == "1":
        bb.build.exec_func('build_syslinux_cfg', d)
    if d.getVar("EFI", True) == "1":
        bb.build.exec_func('build_grub_cfg', d)
    bb.build.exec_func('build_hddimg', d)
    bb.build.exec_func('build_iso', d)
}

addtask bootimg before do_build


@@ -11,28 +11,28 @@ PERL_OWN_DIR = "${@["", "/perl-native"][(bb.data.inherits_class('native', d))]}"
# Determine the staged version of perl from the perl configuration file
def get_perl_version(d):
    import re
    cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh')
    try:
        f = open(cfg, 'r')
    except IOError:
        return None
    l = f.readlines();
    f.close();
    r = re.compile("^version='(\d*\.\d*\.\d*)'")
    for s in l:
        m = r.match(s)
        if m:
            return m.group(1)
    return None

# Determine where the library directories are
def perl_get_libdirs(d):
    libdir = d.getVar('libdir', True)
    if is_target(d) == "no":
        libdir += '/perl-native'
    libdir += '/perl'
    return libdir

def is_target(d):
    if not bb.data.inherits_class('native', d):


@@ -20,105 +20,105 @@ python () {
}

python debian_package_name_hook () {
    import glob, copy, stat, errno, re

    pkgdest = d.getVar('PKGDEST', True)
    packages = d.getVar('PACKAGES', True)
    bin_re = re.compile(".*/s?" + os.path.basename(d.getVar("bindir", True)) + "$")
    lib_re = re.compile(".*/" + os.path.basename(d.getVar("libdir", True)) + "$")
    so_re = re.compile("lib.*\.so")

    def socrunch(s):
        s = s.lower().replace('_', '-')
        m = re.match("^(.*)(.)\.so\.(.*)$", s)
        if m is None:
            return None
        if m.group(2) in '0123456789':
            bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
        else:
            bin = m.group(1) + m.group(2) + m.group(3)
        dev = m.group(1) + m.group(2)
        return (bin, dev)

    def isexec(path):
        try:
            s = os.stat(path)
        except (os.error, AttributeError):
            return 0
        return (s[stat.ST_MODE] & stat.S_IEXEC)

    def auto_libname(packages, orig_pkg):
        sonames = []
        has_bins = 0
        has_libs = 0
        pkg_dir = os.path.join(pkgdest, orig_pkg)
        for root, dirs, files in os.walk(pkg_dir):
            if bin_re.match(root) and files:
                has_bins = 1
            if lib_re.match(root) and files:
                has_libs = 1
                for f in files:
                    if so_re.match(f):
                        fp = os.path.join(root, f)
                        cmd = (d.getVar('TARGET_PREFIX', True) or "") + "objdump -p " + fp + " 2>/dev/null"
                        fd = os.popen(cmd)
                        lines = fd.readlines()
                        fd.close()
                        for l in lines:
                            m = re.match("\s+SONAME\s+([^\s]*)", l)
                            if m and not m.group(1) in sonames:
                                sonames.append(m.group(1))

        bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
        soname = None
        if len(sonames) == 1:
            soname = sonames[0]
        elif len(sonames) > 1:
            lead = d.getVar('LEAD_SONAME', True)
            if lead:
                r = re.compile(lead)
                filtered = []
                for s in sonames:
                    if r.match(s):
                        filtered.append(s)
                if len(filtered) == 1:
                    soname = filtered[0]
                elif len(filtered) > 1:
                    bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
                else:
                    bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
            else:
                bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))

        if has_libs and not has_bins and soname:
            soname_result = socrunch(soname)
            if soname_result:
                (pkgname, devname) = soname_result
                for pkg in packages.split():
                    if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)):
                        continue
                    debian_pn = d.getVar('DEBIANNAME_' + pkg)
                    if debian_pn:
                        newpkg = debian_pn
                    elif pkg == orig_pkg:
                        newpkg = pkgname
                    else:
                        newpkg = pkg.replace(orig_pkg, devname, 1)
                    mlpre = d.getVar('MLPREFIX', True)
                    if mlpre:
                        if not newpkg.find(mlpre) == 0:
                            newpkg = mlpre + newpkg
                    if newpkg != pkg:
                        d.setVar('PKG_' + pkg, newpkg)

    # reversed sort is needed when some package is substring of another
    # ie in ncurses we get without reverse sort:
    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5
    # and later
    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
    # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True):
        auto_libname(packages, pkg)
}

EXPORT_FUNCTIONS package_name_hook


@@ -39,33 +39,33 @@ done
}

python populate_packages_append () {
    import re
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)

    for pkg in packages:
        schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg)
        schemas = []
        schema_re = re.compile(".*\.schemas$")
        if os.path.exists(schema_dir):
            for f in os.listdir(schema_dir):
                if schema_re.match(f):
                    schemas.append(f)
        if schemas != []:
            bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
            d.setVar('SCHEMA_FILES', " ".join(schemas))
            postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('gconf_postinst', True)
            d.setVar('pkg_postinst_%s' % pkg, postinst)
            prerm = d.getVar('pkg_prerm_%s' % pkg, True) or d.getVar('pkg_prerm', True)
            if not prerm:
                prerm = '#!/bin/sh\n'
            prerm += d.getVar('gconf_prerm', True)
            d.setVar('pkg_prerm_%s' % pkg, prerm)
            rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
            rdepends += ' ' + d.getVar('MLPREFIX') + 'gconf'
            d.setVar("RDEPENDS_%s" % pkg, rdepends)
}


@@ -1,7 +1,7 @@
def gnome_verdir(v):
    import re
    m = re.match("^([0-9]+)\.([0-9]+)", v)
    return "%s.%s" % (m.group(1), m.group(2))

GNOME_COMPRESS_TYPE ?= "bz2"
SECTION ?= "x11/gnome"


@@ -28,31 +28,31 @@ done
}

python populate_packages_append () {
    packages = d.getVar('PACKAGES', True).split()
    pkgdest = d.getVar('PKGDEST', True)

    for pkg in packages:
        icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True))
        if not os.path.exists(icon_dir):
            continue

        bb.note("adding hicolor-icon-theme dependency to %s" % pkg)
        rdepends = d.getVar('RDEPENDS_%s' % pkg, True)
        rdepends = rdepends + ' ' + d.getVar('MLPREFIX') + "hicolor-icon-theme"
        d.setVar('RDEPENDS_%s' % pkg, rdepends)

        bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg)

        postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
        if not postinst:
            postinst = '#!/bin/sh\n'
        postinst += d.getVar('gtk_icon_cache_postinst', True)
        d.setVar('pkg_postinst_%s' % pkg, postinst)

        postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
        if not postrm:
            postrm = '#!/bin/sh\n'
        postrm += d.getVar('gtk_icon_cache_postrm', True)
        d.setVar('pkg_postrm_%s' % pkg, postrm)
}


@@ -164,28 +164,28 @@ do_rootfs[umask] = "022"
fakeroot do_rootfs () {
    #set -x
    # When using the rpm incremental image generation, don't remove the rootfs
    if [ "${INC_RPM_IMAGE_GEN}" != "1" -o "${IMAGE_PKGTYPE}" != "rpm" ]; then
        rm -rf ${IMAGE_ROOTFS}
    elif [ -d ${T}/saved_rpmlib/var/lib/rpm ]; then
        # Move the rpmlib back
        if [ ! -d ${IMAGE_ROOTFS}/var/lib/rpm ]; then
            mkdir -p ${IMAGE_ROOTFS}/var/lib/
            mv ${T}/saved_rpmlib/var/lib/rpm ${IMAGE_ROOTFS}/var/lib/
        fi
    fi
    rm -rf ${MULTILIB_TEMP_ROOTFS}
    mkdir -p ${IMAGE_ROOTFS}
    mkdir -p ${DEPLOY_DIR_IMAGE}

    cp ${COREBASE}/meta/files/deploydir_readme.txt ${DEPLOY_DIR_IMAGE}/README_-_DO_NOT_DELETE_FILES_IN_THIS_DIRECTORY.txt || true

    # If "${IMAGE_ROOTFS}/dev" exists, then the device had been made by
    # the previous build
    if [ "${USE_DEVFS}" != "1" -a ! -r "${IMAGE_ROOTFS}/dev" ]; then
        for devtable in ${@get_devtable_list(d)}; do
            # Always return true since there may already be one when using the
            # incremental image generation
            makedevs -r ${IMAGE_ROOTFS} -D $devtable
        done
    fi

@@ -398,7 +398,7 @@ rootfs_trim_schemas () {
        # Need this in case no files exist
        if [ -e $schema ]; then
            oe-trim-schemas $schema > $schema.new
            mv $schema.new $schema
        fi
    done
}


@@ -48,7 +48,7 @@ def get_imagecmds(d):
        types.remove("live")

    if d.getVar('IMAGE_LINK_NAME', True):
        cmds += "\trm -f ${DEPLOY_DIR_IMAGE}/${IMAGE_LINK_NAME}.*"

    for type in types:
        ccmd = []


@@ -7,38 +7,38 @@
valid_archs = "alpha cris ia64 \
               i386 x86 \
               m68knommu m68k ppc powerpc powerpc64 ppc64 \
               sparc sparc64 \
               arm \
               m32r mips \
               sh sh64 um h8300 \
               parisc s390 v850 \
               avr32 blackfin \
               microblaze"

def map_kernel_arch(a, d):
    import re

    valid_archs = d.getVar('valid_archs', True).split()

    if re.match('(i.86|athlon|x86.64)$', a): return 'x86'
    elif re.match('armeb$', a): return 'arm'
    elif re.match('mips(el|64|64el)$', a): return 'mips'
    elif re.match('p(pc|owerpc)(|64)', a): return 'powerpc'
    elif re.match('sh(3|4)$', a): return 'sh'
    elif re.match('bfin', a): return 'blackfin'
    elif re.match('microblazeel', a): return 'microblaze'
    elif a in valid_archs: return a
    else:
        bb.error("cannot map '%s' to a linux kernel architecture" % a)

export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}"

def map_uboot_arch(a, d):
    import re

    if re.match('p(pc|owerpc)(|64)', a): return 'ppc'
    elif re.match('i.86$', a): return 'x86'
    return a

export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}"


@@ -6,41 +6,41 @@ SRCTREECOVEREDTASKS += "do_kernel_link_vmlinux do_kernel_configme do_validate_br
# returns local (absolute) path names for all valid patches in the
# src_uri
def find_patches(d):
    patches = src_patches(d)
    patch_list = []
    for p in patches:
        _, _, local, _, _, _ = bb.decodeurl(p)
        patch_list.append(local)

    return patch_list

# returns all the elements from the src uri that are .scc files
def find_sccs(d):
    sources = src_patches(d, True)
    sources_list = []
    for s in sources:
        base, ext = os.path.splitext(os.path.basename(s))
        if ext and ext in ('.scc' '.cfg'):
            sources_list.append(s)
        elif base and base in 'defconfig':
            sources_list.append(s)

    return sources_list

# this is different from find_patches, in that it returns a colon separated
# list of <patches>:<subdir> instead of just a list of patches
def find_urls(d):
    patches = src_patches(d)
    fetch = bb.fetch2.Fetch([], d)
    patch_list = []
    for p in patches:
        _, _, local, _, _, _ = bb.decodeurl(p)
        for url in fetch.urls:
            urldata = fetch.ud[url]
            if urldata.localpath == local:
                patch_list.append(local+':'+urldata.path)

    return patch_list

do_patch() {


@@ -310,177 +310,177 @@ module_conf_sco = "alias bt-proto-2 sco"
module_conf_rfcomm = "alias bt-proto-3 rfcomm"

python populate_packages_prepend () {
    def extract_modinfo(file):
        import tempfile, re, subprocess
        tempfile.tempdir = d.getVar("WORKDIR", True)
        tf = tempfile.mkstemp()
        tmpfile = tf[1]
        cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("PATH", True), d.getVar("HOST_PREFIX", True) or "", file, tmpfile)
        subprocess.call(cmd, shell=True)
        f = open(tmpfile)
        l = f.read().split("\000")
        f.close()
        os.close(tf[0])
        os.unlink(tmpfile)
        exp = re.compile("([^=]+)=(.*)")
        vals = {}
        for i in l:
            m = exp.match(i)
            if not m:
                continue
            vals[m.group(1)] = m.group(2)
        return vals

    def parse_depmod():
        import re

        dvar = d.getVar('PKGD', True)
        if not dvar:
            bb.error("PKGD not defined")
            return

        kernelver = d.getVar('KERNEL_VERSION', True)
        kernelver_stripped = kernelver
        m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
        if m:
            kernelver_stripped = m.group(1)
        path = d.getVar("PATH", True)

        cmd = "PATH=\"%s\" depmod -n -a -b %s -F %s/boot/System.map-%s %s" % (path, dvar, dvar, kernelver, kernelver_stripped)
        f = os.popen(cmd, 'r')

        deps = {}
        pattern0 = "^(.*\.k?o):..*$"
        pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$"
        pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$"
        pattern3 = "^\t(.*\.k?o)\s*\\\$"
        pattern4 = "^\t(.*\.k?o)\s*$"

        line = f.readline()
        while line:
            if not re.match(pattern0, line):
                line = f.readline()
                continue
            m1 = re.match(pattern1, line)
            if m1:
                deps[m1.group(1)] = m1.group(2).split()
            else:
                m2 = re.match(pattern2, line)
                if m2:
                    deps[m2.group(1)] = m2.group(2).split()
                    line = f.readline()
                    m3 = re.match(pattern3, line)
                    while m3:
                        deps[m2.group(1)].extend(m3.group(1).split())
                        line = f.readline()
                        m3 = re.match(pattern3, line)
                    m4 = re.match(pattern4, line)
                    deps[m2.group(1)].extend(m4.group(1).split())
            line = f.readline()
        f.close()
        return deps

    def get_dependencies(file, pattern, format):
        # file no longer includes PKGD
        file = file.replace(d.getVar('PKGD', True) or '', '', 1)
        # instead is prefixed with /lib/modules/${KERNEL_VERSION}
        file = file.replace("/lib/modules/%s/" % d.getVar('KERNEL_VERSION', True) or '', '', 1)

        if module_deps.has_key(file):
            import re
            dependencies = []
            for i in module_deps[file]:
                m = re.match(pattern, os.path.basename(i))
                if not m:
                    continue
                on = legitimize_package_name(m.group(1))
                dependency_pkg = format % on
                dependencies.append(dependency_pkg)
            return dependencies
        return []

    def frob_metadata(file, pkg, pattern, format, basename):
        import re
        vals = extract_modinfo(file)

        dvar = d.getVar('PKGD', True)

        # If autoloading is requested, output /etc/modules-load.d/<name>.conf and append
        # appropriate modprobe commands to the postinst
        autoload = d.getVar('module_autoload_%s' % basename, True)
        if autoload:
            name = '%s/etc/modules-load.d/%s.conf' % (dvar, basename)
            f = open(name, 'w')
            for m in autoload.split():
                f.write('%s\n' % m)
            f.close()
            postinst = d.getVar('pkg_postinst_%s' % pkg, True)
            if not postinst:
                bb.fatal("pkg_postinst_%s not defined" % pkg)
            postinst += d.getVar('autoload_postinst_fragment', True) % autoload
            d.setVar('pkg_postinst_%s' % pkg, postinst)

        # Write out any modconf fragment
        modconf = d.getVar('module_conf_%s' % basename, True)
        if modconf:
            name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename)
            f = open(name, 'w')
            f.write("%s\n" % modconf)
            f.close()

        files = d.getVar('FILES_%s' % pkg, True)
        files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename)
        d.setVar('FILES_%s' % pkg, files)

        if vals.has_key("description"):
            old_desc = d.getVar('DESCRIPTION_' + pkg, True) or ""
            d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"])

        rdepends_str = d.getVar('RDEPENDS_' + pkg, True)
        if rdepends_str:
            rdepends = rdepends_str.split()
        else:
            rdepends = []
        rdepends.extend(get_dependencies(file, pattern, format))
        d.setVar('RDEPENDS_' + pkg, ' '.join(rdepends))

    module_deps = parse_depmod()
    module_regex = '^(.*)\.k?o$'
    module_pattern = 'kernel-module-%s'

    postinst = d.getVar('pkg_postinst_modules', True)
    postrm = d.getVar('pkg_postrm_modules', True)
    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
    do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
    do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-%s' % d.getVar("KERNEL_VERSION", True))

    # If modules-load.d and modprobe.d are empty at this point, remove them to
    # avoid warnings. removedirs only raises an OSError if an empty
    # directory cannot be removed.
    dvar = d.getVar('PKGD', True)
    for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]:
        if len(os.listdir(dir)) == 0:
            os.rmdir(dir)

    import re
    metapkg = "kernel-modules"
    d.setVar('ALLOW_EMPTY_' + metapkg, "1")
    d.setVar('FILES_' + metapkg, "")
    blacklist = [ 'kernel-dev', 'kernel-image', 'kernel-base', 'kernel-vmlinux' ]
    for l in module_deps.values():
        for i in l:
            pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
            blacklist.append(pkg)
    metapkg_rdepends = []
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages[1:]:
        if not pkg in blacklist and not pkg in metapkg_rdepends:
            metapkg_rdepends.append(pkg)
    d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
    d.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package')
    packages.append(metapkg)
    d.setVar('PACKAGES', ' '.join(packages))
}

# Support checking the kernel size since some kernels need to reside in partitions


@@ -23,13 +23,13 @@ def get_libc_fpu_setting(bb, d):
    return ""

python populate_packages_prepend () {
    if d.getVar('DEBIAN_NAMES', True):
        bpn = d.getVar('BPN', True)
        d.setVar('PKG_'+bpn, 'libc6')
        d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
        d.setVar('PKG_'+bpn+'-dbg', 'libc6-dbg')
        # For backward compatibility with old -dbg package
        d.appendVar('RPROVIDES_' + bpn + '-dbg', ' libc-dbg')
        d.appendVar('RCONFLICTS_' + bpn + '-dbg', ' libc-dbg')
        d.appendVar('RREPLACES_' + bpn + '-dbg', ' libc-dbg')
}


@@ -28,10 +28,10 @@ python __anonymous () {
    if r.match(target_arch):
        depends = d.getVar("DEPENDS", True)
        if use_cross_localedef == "1" :
            depends = "%s cross-localedef-native" % depends
        else:
            depends = "%s qemu-native" % depends
        d.setVar("DEPENDS", depends)
        d.setVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", "compile")
        break
@@ -118,270 +118,270 @@ do_collect_bins_from_locale_tree() {
inherit qemu

python package_do_split_gconvs () {
    import os, re
    if (d.getVar('PACKAGE_NO_GCONV', True) == '1'):
        bb.note("package requested not splitting gconvs")
        return

    if not d.getVar('PACKAGES', True):
        return

    mlprefix = d.getVar("MLPREFIX", True) or ""

    bpn = d.getVar('BPN', True)
    libdir = d.getVar('libdir', True)
    if not libdir:
        bb.error("libdir not defined")
        return
    datadir = d.getVar('datadir', True)
    if not datadir:
        bb.error("datadir not defined")
        return

    gconv_libdir = base_path_join(libdir, "gconv")
    charmap_dir = base_path_join(datadir, "i18n", "charmaps")
    locales_dir = base_path_join(datadir, "i18n", "locales")
    binary_locales_dir = base_path_join(libdir, "locale")

    def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "r")
        c_re = re.compile('^copy "(.*)"')
        i_re = re.compile('^include "(\w+)".*')
        for l in f.readlines():
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name('%s%s-gconv-%s' % (mlprefix, bpn, m.group(1)))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, gconv_libdir, file_regex='^(.*)\.so$', output_pattern=bpn+'-gconv-%s', \
        description='gconv module for character set %s', hook=calc_gconv_deps, \
        extra_depends=bpn+'-gconv')

    def calc_charmap_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "r")
        c_re = re.compile('^copy "(.*)"')
        i_re = re.compile('^include "(\w+)".*')
        for l in f.readlines():
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name('%s%s-charmap-%s' % (mlprefix, bpn, m.group(1)))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, charmap_dir, file_regex='^(.*)\.gz$', output_pattern=bpn+'-charmap-%s', \
        description='character map for %s encoding', hook=calc_charmap_deps, extra_depends='')

    def calc_locale_deps(fn, pkg, file_regex, output_pattern, group):
        deps = []
        f = open(fn, "r")
        c_re = re.compile('^copy "(.*)"')
        i_re = re.compile('^include "(\w+)".*')
        for l in f.readlines():
            m = c_re.match(l) or i_re.match(l)
            if m:
                dp = legitimize_package_name(mlprefix+bpn+'-localedata-%s' % m.group(1))
                if not dp in deps:
                    deps.append(dp)
        f.close()
        if deps != []:
            d.setVar('RDEPENDS_%s' % pkg, " ".join(deps))
        if bpn != 'glibc':
            d.setVar('RPROVIDES_%s' % pkg, pkg.replace(bpn, 'glibc'))

    do_split_packages(d, locales_dir, file_regex='(.*)', output_pattern=bpn+'-localedata-%s', \
        description='locale definition for %s', hook=calc_locale_deps, extra_depends='')
    d.setVar('PACKAGES', d.getVar('PACKAGES') + ' ' + d.getVar('MLPREFIX') + bpn + '-gconv')

    use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True)

    dot_re = re.compile("(.*)\.(.*)")

    # Read in supported locales and associated encodings
    supported = {}
    with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f:
        for line in f.readlines():
            try:
                locale, charset = line.rstrip().split()
            except ValueError:
                continue
            supported[locale] = charset

    # The GLIBC_GENERATE_LOCALES var specifies which locales are to be generated. Empty or "all" means all locales
    to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True)
    if not to_generate or to_generate == 'all':
        to_generate = supported.keys()
    else:
        to_generate = to_generate.split()
        for locale in to_generate:
            if locale not in supported:
                if '.' in locale:
                    charset = locale.split('.')[1]
                else:
                    charset = 'UTF-8'
                bb.warn("Unsupported locale '%s', assuming encoding '%s'" % (locale, charset))
                supported[locale] = charset

    def output_locale_source(name, pkgname, locale, encoding):
        d.setVar('RDEPENDS_%s' % pkgname, 'localedef %s-localedata-%s %s-charmap-%s' % \
            (mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding)))
        d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \
            % (locale, encoding, locale))
        d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \
            (locale, encoding, locale))

    def output_locale_binary_rdepends(name, pkgname, locale, encoding):
        m = re.match("(.*)\.(.*)", name)
        if m:
            libc_name = "%s.%s" % (m.group(1), m.group(2).lower().replace("-",""))
        else:
            libc_name = name
        d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \
            % (mlprefix+bpn, libc_name)))

    commands = {}

    def output_locale_binary(name, pkgname, locale, encoding):
        treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree")
        ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True))
        path = d.getVar("PATH", True)
        i18npath = base_path_join(treedir, datadir, "i18n")
        gconvpath = base_path_join(treedir, "iconvdata")
        outputpath = base_path_join(treedir, libdir, "locale")

        use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0"
        if use_cross_localedef == "1":
            target_arch = d.getVar('TARGET_ARCH', True)
            locale_arch_options = { \
                "arm": " --uint32-align=4 --little-endian ", \
                "sh4": " --uint32-align=4 --big-endian ", \
                "powerpc": " --uint32-align=4 --big-endian ", \
                "powerpc64": " --uint32-align=4 --big-endian ", \
                "mips": " --uint32-align=4 --big-endian ", \
                "mips64": " --uint32-align=4 --big-endian ", \
                "mipsel": " --uint32-align=4 --little-endian ", \
                "mips64el":" --uint32-align=4 --little-endian ", \
                "i586": " --uint32-align=4 --little-endian ", \
                "i686": " --uint32-align=4 --little-endian ", \
                "x86_64": " --uint32-align=4 --little-endian " }

            if target_arch in locale_arch_options:
                localedef_opts = locale_arch_options[target_arch]
            else:
                bb.error("locale_arch_options not found for target_arch=" + target_arch)
                raise bb.build.FuncFailed("unknown arch:" + target_arch + " for locale_arch_options")

            localedef_opts += " --force --old-style --no-archive --prefix=%s \
                --inputfile=%s/%s/i18n/locales/%s --charmap=%s %s/%s" \
                % (treedir, treedir, datadir, locale, encoding, outputpath, name)

            cmd = "PATH=\"%s\" I18NPATH=\"%s\" GCONV_PATH=\"%s\" cross-localedef %s" % \
                (path, i18npath, gconvpath, localedef_opts)
        else: # earlier slower qemu way
            qemu = qemu_target_binary(d)
            localedef_opts = "--force --old-style --no-archive --prefix=%s \
                --inputfile=%s/i18n/locales/%s --charmap=%s %s" \
                % (treedir, datadir, locale, encoding, name)

            qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
            if not qemu_options:
                qemu_options = d.getVar('QEMU_OPTIONS', True)

            cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
                -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
                (path, i18npath, qemu, treedir, ldlibdir, qemu_options, treedir, localedef_opts)

        commands["%s/%s" % (outputpath, name)] = cmd

        bb.note("generating locale %s (%s)" % (locale, encoding))

    def output_locale(name, locale, encoding):
        pkgname = d.getVar('MLPREFIX') + 'locale-base-' + legitimize_package_name(name)
        d.setVar('ALLOW_EMPTY_%s' % pkgname, '1')
        d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True)))
        rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name))
m = re.match("(.*)_(.*)", name) m = re.match("(.*)_(.*)", name)
if m: if m:
rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1)) rprovides += ' %svirtual-locale-%s' % (mlprefix, m.group(1))
d.setVar('RPROVIDES_%s' % pkgname, rprovides) d.setVar('RPROVIDES_%s' % pkgname, rprovides)
if use_bin == "compile": if use_bin == "compile":
output_locale_binary_rdepends(name, pkgname, locale, encoding) output_locale_binary_rdepends(name, pkgname, locale, encoding)
output_locale_binary(name, pkgname, locale, encoding) output_locale_binary(name, pkgname, locale, encoding)
elif use_bin == "precompiled": elif use_bin == "precompiled":
output_locale_binary_rdepends(name, pkgname, locale, encoding) output_locale_binary_rdepends(name, pkgname, locale, encoding)
else: else:
output_locale_source(name, pkgname, locale, encoding) output_locale_source(name, pkgname, locale, encoding)
if use_bin == "compile": if use_bin == "compile":
bb.note("preparing tree for binary locale generation") bb.note("preparing tree for binary locale generation")
bb.build.exec_func("do_prep_locale_tree", d) bb.build.exec_func("do_prep_locale_tree", d)
utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0) utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0)
encodings = {} encodings = {}
for locale in to_generate: for locale in to_generate:
charset = supported[locale] charset = supported[locale]
if utf8_only and charset != 'UTF-8': if utf8_only and charset != 'UTF-8':
continue continue
m = dot_re.match(locale) m = dot_re.match(locale)
if m: if m:
base = m.group(1) base = m.group(1)
else: else:
base = locale base = locale
# Precompiled locales are kept as is, obeying SUPPORTED, while # Precompiled locales are kept as is, obeying SUPPORTED, while
# others are adjusted, ensuring that the non-suffixed locales # others are adjusted, ensuring that the non-suffixed locales
# are utf-8, while the suffixed are not. # are utf-8, while the suffixed are not.
if use_bin == "precompiled": if use_bin == "precompiled":
output_locale(locale, base, charset) output_locale(locale, base, charset)
else: else:
if charset == 'UTF-8': if charset == 'UTF-8':
output_locale(base, base, charset) output_locale(base, base, charset)
else: else:
output_locale('%s.%s' % (base, charset), base, charset) output_locale('%s.%s' % (base, charset), base, charset)
if use_bin == "compile": if use_bin == "compile":
makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile") makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile")
m = open(makefile, "w") m = open(makefile, "w")
m.write("all: %s\n\n" % " ".join(commands.keys())) m.write("all: %s\n\n" % " ".join(commands.keys()))
for cmd in commands: for cmd in commands:
m.write(cmd + ":\n") m.write(cmd + ":\n")
m.write(" " + commands[cmd] + "\n\n") m.write("\t" + commands[cmd] + "\n\n")
m.close() m.close()
d.setVar("B", os.path.dirname(makefile)) d.setVar("B", os.path.dirname(makefile))
d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}") d.setVar("EXTRA_OEMAKE", "${PARALLEL_MAKE}")
bb.note("Executing binary locale generation makefile") bb.note("Executing binary locale generation makefile")
bb.build.exec_func("oe_runmake", d) bb.build.exec_func("oe_runmake", d)
bb.note("collecting binary locales from locale tree") bb.note("collecting binary locales from locale tree")
bb.build.exec_func("do_collect_bins_from_locale_tree", d) bb.build.exec_func("do_collect_bins_from_locale_tree", d)
do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
output_pattern=bpn+'-binary-localedata-%s', \ output_pattern=bpn+'-binary-localedata-%s', \
description='binary locale definition for %s', extra_depends='', allow_dirs=True) description='binary locale definition for %s', extra_depends='', allow_dirs=True)
elif use_bin == "precompiled": elif use_bin == "precompiled":
do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ do_split_packages(d, binary_locales_dir, file_regex='(.*)', \
output_pattern=bpn+'-binary-localedata-%s', \ output_pattern=bpn+'-binary-localedata-%s', \
description='binary locale definition for %s', extra_depends='', allow_dirs=True) description='binary locale definition for %s', extra_depends='', allow_dirs=True)
else: else:
bb.note("generation of binary locales disabled. this may break i18n!") bb.note("generation of binary locales disabled. this may break i18n!")
} }
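For reference, the Makefile written by the "compile" branch above has roughly this shape (paths shortened and the command line abbreviated; the real contents depend on WORKDIR, the tool chosen, and the arch options):

all: <treedir>/usr/lib/locale/en_GB.UTF-8 <treedir>/usr/lib/locale/de_DE.UTF-8

<treedir>/usr/lib/locale/en_GB.UTF-8:
	PATH="..." I18NPATH="..." GCONV_PATH="..." cross-localedef --force --old-style --no-archive ...

Every locale is an independent target with no prerequisites, which is why handing the file to oe_runmake with EXTRA_OEMAKE set to ${PARALLEL_MAKE} generates the locales concurrently.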
# We want to do this indirection so that we can safely 'return' # We want to do this indirection so that we can safely 'return'
# from the called function even though we're prepending # from the called function even though we're prepending
python populate_packages_prepend () { python populate_packages_prepend () {
bb.build.exec_func('package_do_split_gconvs', d) bb.build.exec_func('package_do_split_gconvs', d)
} }
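The indirection is needed because a _prepend to a python function is spliced into that function's body at parse time; a bare return in the prepended text would abort the whole of populate_packages. Calling through bb.build.exec_func gives the logic its own scope. A hypothetical sketch (helper name invented):

python populate_packages_prepend () {
    # The helper runs in its own function scope via exec_func,
    # so any 'return' inside it stays local.
    bb.build.exec_func('my_split_helper', d)
}

python my_split_helper () {
    if not d.getVar('PACKAGES', True):
        return    # exits only the helper, not populate_packages
    # ... actual splitting work ...
}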
@@ -385,6 +385,6 @@ do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/"
ROOTFS_POSTPROCESS_COMMAND_prepend = "license_create_manifest; " ROOTFS_POSTPROCESS_COMMAND_prepend = "license_create_manifest; "
python do_populate_lic_setscene () { python do_populate_lic_setscene () {
sstate_setscene(d) sstate_setscene(d)
} }
addtask do_populate_lic_setscene addtask do_populate_lic_setscene
@@ -2,76 +2,76 @@ METADATA_BRANCH ?= "${@base_detect_branch(d)}"
METADATA_REVISION ?= "${@base_detect_revision(d)}" METADATA_REVISION ?= "${@base_detect_revision(d)}"
def base_detect_revision(d): def base_detect_revision(d):
path = base_get_scmbasepath(d) path = base_get_scmbasepath(d)
scms = [base_get_metadata_git_revision, \ scms = [base_get_metadata_git_revision, \
base_get_metadata_svn_revision] base_get_metadata_svn_revision]
for scm in scms: for scm in scms:
rev = scm(path, d) rev = scm(path, d)
if rev != "<unknown>": if rev != "<unknown>":
return rev return rev
return "<unknown>" return "<unknown>"
def base_detect_branch(d): def base_detect_branch(d):
path = base_get_scmbasepath(d) path = base_get_scmbasepath(d)
scms = [base_get_metadata_git_branch] scms = [base_get_metadata_git_branch]
for scm in scms: for scm in scms:
rev = scm(path, d) rev = scm(path, d)
if rev != "<unknown>": if rev != "<unknown>":
return rev.strip() return rev.strip()
return "<unknown>" return "<unknown>"
def base_get_scmbasepath(d): def base_get_scmbasepath(d):
return d.getVar( 'COREBASE', True) return d.getVar( 'COREBASE', True)
def base_get_metadata_monotone_branch(path, d): def base_get_metadata_monotone_branch(path, d):
monotone_branch = "<unknown>" monotone_branch = "<unknown>"
try: try:
monotone_branch = file( "%s/_MTN/options" % path ).read().strip() monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
if monotone_branch.startswith( "database" ): if monotone_branch.startswith( "database" ):
monotone_branch_words = monotone_branch.split() monotone_branch_words = monotone_branch.split()
monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1] monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
except: except:
pass pass
return monotone_branch return monotone_branch
def base_get_metadata_monotone_revision(path, d): def base_get_metadata_monotone_revision(path, d):
monotone_revision = "<unknown>" monotone_revision = "<unknown>"
try: try:
monotone_revision = file( "%s/_MTN/revision" % path ).read().strip() monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
if monotone_revision.startswith( "format_version" ): if monotone_revision.startswith( "format_version" ):
monotone_revision_words = monotone_revision.split() monotone_revision_words = monotone_revision.split()
monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1] monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
except IOError: except IOError:
pass pass
return monotone_revision return monotone_revision
def base_get_metadata_svn_revision(path, d): def base_get_metadata_svn_revision(path, d):
revision = "<unknown>" revision = "<unknown>"
try: try:
revision = file( "%s/.svn/entries" % path ).readlines()[3].strip() revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
except IOError: except IOError:
pass pass
return revision return revision
def base_get_metadata_git_branch(path, d): def base_get_metadata_git_branch(path, d):
branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read() branch = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path).read()
if len(branch) != 0: if len(branch) != 0:
return branch return branch
return "<unknown>" return "<unknown>"
def base_get_metadata_git_revision(path, d): def base_get_metadata_git_revision(path, d):
f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path) f = os.popen("cd %s; git log -n 1 --pretty=oneline -- 2>&1" % path)
data = f.read() data = f.read()
if f.close() is None: if f.close() is None:
rev = data.split(" ")[0] rev = data.split(" ")[0]
if len(rev) != 0: if len(rev) != 0:
return rev return rev
return "<unknown>" return "<unknown>"
@@ -29,32 +29,32 @@ fi
} }
python populate_packages_append () { python populate_packages_append () {
import re import re
packages = d.getVar('PACKAGES', True).split() packages = d.getVar('PACKAGES', True).split()
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST', True)
for pkg in packages: for pkg in packages:
mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg) mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg)
mimes = [] mimes = []
mime_re = re.compile(".*\.xml$") mime_re = re.compile(".*\.xml$")
if os.path.exists(mime_dir): if os.path.exists(mime_dir):
for f in os.listdir(mime_dir): for f in os.listdir(mime_dir):
if mime_re.match(f): if mime_re.match(f):
mimes.append(f) mimes.append(f)
if mimes: if mimes:
bb.note("adding mime postinst and postrm scripts to %s" % pkg) bb.note("adding mime postinst and postrm scripts to %s" % pkg)
postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += d.getVar('mime_postinst', True) postinst += d.getVar('mime_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
postrm += d.getVar('mime_postrm', True) postrm += d.getVar('mime_postrm', True)
d.setVar('pkg_postrm_%s' % pkg, postrm) d.setVar('pkg_postrm_%s' % pkg, postrm)
bb.note("adding shared-mime-info-data dependency to %s" % pkg) bb.note("adding shared-mime-info-data dependency to %s" % pkg)
rdepends = explode_deps(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "" ) rdepends = explode_deps(d.getVar('RDEPENDS_' + pkg, False) or d.getVar('RDEPENDS', False) or "" )
rdepends.append("shared-mime-info-data") rdepends.append("shared-mime-info-data")
d.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends)) d.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends))
} }
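A minimal consumer of this class would only need to inherit it and install an XML description (recipe snippet with invented file name):

inherit mime

SRC_URI += "file://foo-mime.xml"

do_install_append () {
    install -D -m 0644 ${WORKDIR}/foo-mime.xml \
        ${D}${datadir}/mime/packages/foo-mime.xml
}

The postinst/postrm wiring and the shared-mime-info-data dependency then happen automatically at packaging time.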
@@ -418,8 +418,8 @@ python () {
} }
python do_package_write_deb () { python do_package_write_deb () {
bb.build.exec_func("read_subpackage_metadata", d) bb.build.exec_func("read_subpackage_metadata", d)
bb.build.exec_func("do_package_deb", d) bb.build.exec_func("do_package_deb", d)
} }
do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}" do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}"
do_package_write_deb[umask] = "022" do_package_write_deb[umask] = "022"
@@ -1,13 +1,13 @@
python read_subpackage_metadata () { python read_subpackage_metadata () {
import oe.packagedata import oe.packagedata
data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d) data = oe.packagedata.read_pkgdata(d.getVar('PN', True), d)
for key in data.keys(): for key in data.keys():
d.setVar(key, data[key]) d.setVar(key, data[key])
for pkg in d.getVar('PACKAGES', True).split(): for pkg in d.getVar('PACKAGES', True).split():
sdata = oe.packagedata.read_subpkgdata(pkg, d) sdata = oe.packagedata.read_subpkgdata(pkg, d)
for key in sdata.keys(): for key in sdata.keys():
d.setVar(key, sdata[key]) d.setVar(key, sdata[key])
} }
@@ -8,164 +8,164 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_sysroot"
inherit terminal inherit terminal
def src_patches(d, all = False ): def src_patches(d, all = False ):
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR', True)
fetch = bb.fetch2.Fetch([], d) fetch = bb.fetch2.Fetch([], d)
patches = [] patches = []
sources = [] sources = []
for url in fetch.urls: for url in fetch.urls:
local = patch_path(url, fetch, workdir) local = patch_path(url, fetch, workdir)
if not local: if not local:
if all: if all:
local = fetch.localpath(url) local = fetch.localpath(url)
sources.append(local) sources.append(local)
continue continue
urldata = fetch.ud[url] urldata = fetch.ud[url]
parm = urldata.parm parm = urldata.parm
patchname = parm.get('pname') or os.path.basename(local) patchname = parm.get('pname') or os.path.basename(local)
apply, reason = should_apply(parm, d) apply, reason = should_apply(parm, d)
if not apply: if not apply:
if reason: if reason:
bb.note("Patch %s %s" % (patchname, reason)) bb.note("Patch %s %s" % (patchname, reason))
continue continue
patchparm = {'patchname': patchname} patchparm = {'patchname': patchname}
if "striplevel" in parm: if "striplevel" in parm:
striplevel = parm["striplevel"] striplevel = parm["striplevel"]
elif "pnum" in parm: elif "pnum" in parm:
#bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url) #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url)
striplevel = parm["pnum"] striplevel = parm["pnum"]
else: else:
striplevel = '1' striplevel = '1'
patchparm['striplevel'] = striplevel patchparm['striplevel'] = striplevel
patchdir = parm.get('patchdir') patchdir = parm.get('patchdir')
if patchdir: if patchdir:
patchparm['patchdir'] = patchdir patchparm['patchdir'] = patchdir
localurl = bb.encodeurl(('file', '', local, '', '', patchparm)) localurl = bb.encodeurl(('file', '', local, '', '', patchparm))
patches.append(localurl) patches.append(localurl)
if all: if all:
return sources return sources
return patches return patches
def patch_path(url, fetch, workdir): def patch_path(url, fetch, workdir):
"""Return the local path of a patch, or None if this isn't a patch""" """Return the local path of a patch, or None if this isn't a patch"""
local = fetch.localpath(url) local = fetch.localpath(url)
base, ext = os.path.splitext(os.path.basename(local)) base, ext = os.path.splitext(os.path.basename(local))
if ext in ('.gz', '.bz2', '.Z'): if ext in ('.gz', '.bz2', '.Z'):
local = os.path.join(workdir, base) local = os.path.join(workdir, base)
ext = os.path.splitext(base)[1] ext = os.path.splitext(base)[1]
urldata = fetch.ud[url] urldata = fetch.ud[url]
if "apply" in urldata.parm: if "apply" in urldata.parm:
apply = oe.types.boolean(urldata.parm["apply"]) apply = oe.types.boolean(urldata.parm["apply"])
if not apply: if not apply:
return return
elif ext not in (".diff", ".patch"): elif ext not in (".diff", ".patch"):
return return
return local return local
def should_apply(parm, d): def should_apply(parm, d):
"""Determine if we should apply the given patch""" """Determine if we should apply the given patch"""
if "mindate" in parm or "maxdate" in parm: if "mindate" in parm or "maxdate" in parm:
pn = d.getVar('PN', True) pn = d.getVar('PN', True)
srcdate = d.getVar('SRCDATE_%s' % pn, True) srcdate = d.getVar('SRCDATE_%s' % pn, True)
if not srcdate: if not srcdate:
srcdate = d.getVar('SRCDATE', True) srcdate = d.getVar('SRCDATE', True)
if srcdate == "now": if srcdate == "now":
srcdate = d.getVar('DATE', True) srcdate = d.getVar('DATE', True)
if "maxdate" in parm and parm["maxdate"] < srcdate: if "maxdate" in parm and parm["maxdate"] < srcdate:
return False, 'is outdated' return False, 'is outdated'
if "mindate" in parm and parm["mindate"] > srcdate: if "mindate" in parm and parm["mindate"] > srcdate:
return False, 'is predated' return False, 'is predated'
if "minrev" in parm: if "minrev" in parm:
srcrev = d.getVar('SRCREV', True) srcrev = d.getVar('SRCREV', True)
if srcrev and srcrev < parm["minrev"]: if srcrev and srcrev < parm["minrev"]:
return False, 'applies to later revisions' return False, 'applies to later revisions'
if "maxrev" in parm: if "maxrev" in parm:
srcrev = d.getVar('SRCREV', True) srcrev = d.getVar('SRCREV', True)
if srcrev and srcrev > parm["maxrev"]: if srcrev and srcrev > parm["maxrev"]:
return False, 'applies to earlier revisions' return False, 'applies to earlier revisions'
if "rev" in parm: if "rev" in parm:
srcrev = d.getVar('SRCREV', True) srcrev = d.getVar('SRCREV', True)
if srcrev and parm["rev"] not in srcrev: if srcrev and parm["rev"] not in srcrev:
return False, "doesn't apply to revision" return False, "doesn't apply to revision"
if "notrev" in parm: if "notrev" in parm:
srcrev = d.getVar('SRCREV', True) srcrev = d.getVar('SRCREV', True)
if srcrev and parm["notrev"] in srcrev: if srcrev and parm["notrev"] in srcrev:
return False, "doesn't apply to revision" return False, "doesn't apply to revision"
return True, None return True, None
should_apply[vardepsexclude] = "DATE SRCDATE" should_apply[vardepsexclude] = "DATE SRCDATE"
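All of these parameters arrive as attributes on SRC_URI entries. A hypothetical entry exercising several of them (file name and values invented):

SRC_URI += "file://build-fix.patch;striplevel=2;patchdir=src/libfoo;mindate=20120101"

Such a patch is applied with -p2, from src/libfoo relative to ${S}, and only when the resolved SRCDATE is 20120101 or later.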
python patch_do_patch() { python patch_do_patch() {
import oe.patch import oe.patch
patchsetmap = { patchsetmap = {
"patch": oe.patch.PatchTree, "patch": oe.patch.PatchTree,
"quilt": oe.patch.QuiltTree, "quilt": oe.patch.QuiltTree,
"git": oe.patch.GitApplyTree, "git": oe.patch.GitApplyTree,
} }
cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt'] cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt']
resolvermap = { resolvermap = {
"noop": oe.patch.NOOPResolver, "noop": oe.patch.NOOPResolver,
"user": oe.patch.UserResolver, "user": oe.patch.UserResolver,
} }
rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user'] rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user']
classes = {} classes = {}
s = d.getVar('S', True) s = d.getVar('S', True)
path = os.getenv('PATH') path = os.getenv('PATH')
os.putenv('PATH', d.getVar('PATH', True)) os.putenv('PATH', d.getVar('PATH', True))
for patch in src_patches(d): for patch in src_patches(d):
_, _, local, _, _, parm = bb.decodeurl(patch) _, _, local, _, _, parm = bb.decodeurl(patch)
if "patchdir" in parm: if "patchdir" in parm:
patchdir = parm["patchdir"] patchdir = parm["patchdir"]
if not os.path.isabs(patchdir): if not os.path.isabs(patchdir):
patchdir = os.path.join(s, patchdir) patchdir = os.path.join(s, patchdir)
else: else:
patchdir = s patchdir = s
if not patchdir in classes: if not patchdir in classes:
patchset = cls(patchdir, d) patchset = cls(patchdir, d)
resolver = rcls(patchset, oe_terminal) resolver = rcls(patchset, oe_terminal)
classes[patchdir] = (patchset, resolver) classes[patchdir] = (patchset, resolver)
patchset.Clean() patchset.Clean()
else: else:
patchset, resolver = classes[patchdir] patchset, resolver = classes[patchdir]
bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d))) bb.note("Applying patch '%s' (%s)" % (parm['patchname'], oe.path.format_display(local, d)))
try: try:
patchset.Import({"file":local, "strippath": parm['striplevel']}, True) patchset.Import({"file":local, "strippath": parm['striplevel']}, True)
except Exception as exc: except Exception as exc:
bb.fatal(str(exc)) bb.fatal(str(exc))
try: try:
resolver.Resolve() resolver.Resolve()
except bb.BBHandledException as e: except bb.BBHandledException as e:
bb.fatal(str(e)) bb.fatal(str(e))
} }
patch_do_patch[vardepsexclude] = "PATCHRESOLVE" patch_do_patch[vardepsexclude] = "PATCHRESOLVE"
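Both table lookups fall back to defaults ('quilt' and 'user'); a recipe or distro configuration can override them, for example:

PATCHTOOL = "git"
PATCHRESOLVE = "noop"

where the noop resolver turns a failed application into an immediate error instead of dropping into an interactive terminal.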
@@ -1,22 +1,22 @@
python do_pkg_write_metainfo () { python do_pkg_write_metainfo () {
deploydir = d.getVar('DEPLOY_DIR', True) deploydir = d.getVar('DEPLOY_DIR', True)
if not deploydir: if not deploydir:
bb.error("DEPLOY_DIR not defined, unable to write package info") bb.error("DEPLOY_DIR not defined, unable to write package info")
return return
try: try:
infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a') infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
except IOError: except IOError:
raise bb.build.FuncFailed("unable to open package-info file for writing.") raise bb.build.FuncFailed("unable to open package-info file for writing.")
name = d.getVar('PN', True) name = d.getVar('PN', True)
version = d.getVar('PV', True) version = d.getVar('PV', True)
desc = d.getVar('DESCRIPTION', True) desc = d.getVar('DESCRIPTION', True)
page = d.getVar('HOMEPAGE', True) page = d.getVar('HOMEPAGE', True)
lic = d.getVar('LICENSE', True) lic = d.getVar('LICENSE', True)
infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" ) infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
infofile.close() infofile.close()
} }
addtask pkg_write_metainfo after do_package before do_build addtask pkg_write_metainfo after do_package before do_build
@@ -32,29 +32,29 @@ python () {
} }
fakeroot python do_populate_sdk() { fakeroot python do_populate_sdk() {
bb.build.exec_func("populate_sdk_image", d) bb.build.exec_func("populate_sdk_image", d)
# Handle multilibs in the SDK environment, siteconfig, etc files... # Handle multilibs in the SDK environment, siteconfig, etc files...
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
# make sure we only use the WORKDIR value from 'd', or it can change # make sure we only use the WORKDIR value from 'd', or it can change
localdata.setVar('WORKDIR', d.getVar('WORKDIR', True)) localdata.setVar('WORKDIR', d.getVar('WORKDIR', True))
# make sure we only use the SDKTARGETSYSROOT value from 'd' # make sure we only use the SDKTARGETSYSROOT value from 'd'
localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True)) localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True))
# Process DEFAULTTUNE # Process DEFAULTTUNE
bb.build.exec_func("create_sdk_files", localdata) bb.build.exec_func("create_sdk_files", localdata)
variants = d.getVar("MULTILIB_VARIANTS", True) or "" variants = d.getVar("MULTILIB_VARIANTS", True) or ""
for item in variants.split(): for item in variants.split():
# Load overrides from 'd' to avoid having to reset the value... # Load overrides from 'd' to avoid having to reset the value...
overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
localdata.setVar("OVERRIDES", overrides) localdata.setVar("OVERRIDES", overrides)
bb.data.update_data(localdata) bb.data.update_data(localdata)
bb.build.exec_func("create_sdk_files", localdata) bb.build.exec_func("create_sdk_files", localdata)
bb.build.exec_func("tar_sdk", d) bb.build.exec_func("tar_sdk", d)
} }
fakeroot populate_sdk_image() { fakeroot populate_sdk_image() {
@@ -4,12 +4,12 @@
# #
def qemu_target_binary(data): def qemu_target_binary(data):
import bb import bb
target_arch = data.getVar("TARGET_ARCH", True) target_arch = data.getVar("TARGET_ARCH", True)
if target_arch in ("i486", "i586", "i686"): if target_arch in ("i486", "i586", "i686"):
target_arch = "i386" target_arch = "i386"
elif target_arch == "powerpc": elif target_arch == "powerpc":
target_arch = "ppc" target_arch = "ppc"
return "qemu-" + target_arch return "qemu-" + target_arch
@@ -185,18 +185,18 @@ def sstate_installpkg(ss, d):
staging_target = d.getVar('STAGING_DIR_TARGET', True) staging_target = d.getVar('STAGING_DIR_TARGET', True)
staging_host = d.getVar('STAGING_DIR_HOST', True) staging_host = d.getVar('STAGING_DIR_HOST', True)
if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging) sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging)
elif bb.data.inherits_class('cross', d): elif bb.data.inherits_class('cross', d):
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging) sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging)
else: else:
sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host)
# Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed
sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd) sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd)
print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd) print "Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)
subprocess.call(sstate_hardcode_cmd, shell=True) subprocess.call(sstate_hardcode_cmd, shell=True)
# Need to remove this or we'd copy it into the target directory and may # Need to remove this or we'd copy it into the target directory and may
# conflict with another writer # conflict with another writer
@@ -310,50 +310,50 @@ python sstate_cleanall() {
} }
def sstate_hardcode_path(d): def sstate_hardcode_path(d):
import subprocess import subprocess
# Need to remove hardcoded paths and fix these when we install the # Need to remove hardcoded paths and fix these when we install the
# staging packages. # staging packages.
# #
# Note: the logic in this function needs to match the reverse logic # Note: the logic in this function needs to match the reverse logic
# in sstate_installpkg(ss, d) # in sstate_installpkg(ss, d)
staging = d.getVar('STAGING_DIR', True) staging = d.getVar('STAGING_DIR', True)
staging_target = d.getVar('STAGING_DIR_TARGET', True) staging_target = d.getVar('STAGING_DIR_TARGET', True)
staging_host = d.getVar('STAGING_DIR_HOST', True) staging_host = d.getVar('STAGING_DIR_HOST', True)
sstate_builddir = d.getVar('SSTATE_BUILDDIR', True) sstate_builddir = d.getVar('SSTATE_BUILDDIR', True)
if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d):
sstate_grep_cmd = "grep -l -e '%s'" % (staging) sstate_grep_cmd = "grep -l -e '%s'" % (staging)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging) sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging)
elif bb.data.inherits_class('cross', d): elif bb.data.inherits_class('cross', d):
sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging) sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging) sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging)
else: else:
sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host)
fixmefn = sstate_builddir + "fixmepath" fixmefn = sstate_builddir + "fixmepath"
sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True) sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True)
sstate_filelist_cmd = "tee %s" % (fixmefn) sstate_filelist_cmd = "tee %s" % (fixmefn)
# fixmepath file needs relative paths, drop sstate_builddir prefix # fixmepath file needs relative paths, drop sstate_builddir prefix
sstate_filelist_relative_cmd = "sed -i -e 's:^%s::g' %s" % (sstate_builddir, fixmefn) sstate_filelist_relative_cmd = "sed -i -e 's:^%s::g' %s" % (sstate_builddir, fixmefn)
# Limit the fixpaths and sed operations based on the initial grep search # Limit the fixpaths and sed operations based on the initial grep search
# This has the side effect of making sure the vfs cache is hot # This has the side effect of making sure the vfs cache is hot
sstate_hardcode_cmd = "%s | xargs %s | %s | xargs --no-run-if-empty %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, sstate_sed_cmd) sstate_hardcode_cmd = "%s | xargs %s | %s | xargs --no-run-if-empty %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, sstate_sed_cmd)
print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd) print "Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)
subprocess.call(sstate_hardcode_cmd, shell=True) subprocess.call(sstate_hardcode_cmd, shell=True)
# If the fixmefn is empty, remove it.. # If the fixmefn is empty, remove it..
if os.stat(fixmefn).st_size == 0: if os.stat(fixmefn).st_size == 0:
os.remove(fixmefn) os.remove(fixmefn)
else: else:
print "Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd) print "Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd)
subprocess.call(sstate_filelist_relative_cmd, shell=True) subprocess.call(sstate_filelist_relative_cmd, shell=True)
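Concretely, for a target recipe the assembled pipeline has this shape (placeholders stand for the expanded variables; SSTATE_SCAN_CMD is typically a find over the sstate build directory):

find <SSTATE_BUILDDIR> -type f \
  | xargs grep -l -e '<STAGING_DIR_HOST>' \
  | tee <SSTATE_BUILDDIR>fixmepath \
  | xargs --no-run-if-empty sed -i -e 's:<STAGING_DIR_HOST>:FIXMESTAGINGDIRHOST:g'

Only files that actually contain the staging path get rewritten, and the list of touched files is captured in fixmepath for the reverse substitution at install time.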
def sstate_package(ss, d): def sstate_package(ss, d):
import oe.path import oe.path
@@ -109,7 +109,7 @@ do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/"
do_populate_sysroot[stamp-extra-info] = "${MACHINE}" do_populate_sysroot[stamp-extra-info] = "${MACHINE}"
python do_populate_sysroot_setscene () { python do_populate_sysroot_setscene () {
sstate_setscene(d) sstate_setscene(d)
} }
addtask do_populate_sysroot_setscene addtask do_populate_sysroot_setscene
@@ -54,146 +54,146 @@ syslinux_hddimg_install() {
} }
python build_syslinux_menu () { python build_syslinux_menu () {
import copy import copy
import sys import sys
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR', True)
if not workdir: if not workdir:
bb.error("WORKDIR is not defined") bb.error("WORKDIR is not defined")
return return
labels = d.getVar('LABELS', True) labels = d.getVar('LABELS', True)
if not labels: if not labels:
bb.debug(1, "LABELS not defined, nothing to do") bb.debug(1, "LABELS not defined, nothing to do")
return return
if labels == []: if labels == []:
bb.debug(1, "No labels, nothing to do") bb.debug(1, "No labels, nothing to do")
return return
cfile = d.getVar('SYSLINUXMENU', True) cfile = d.getVar('SYSLINUXMENU', True)
if not cfile: if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXMENU') raise bb.build.FuncFailed('Unable to read SYSLINUXMENU')
try: try:
cfgfile = file(cfile, 'w') cfgfile = file(cfile, 'w')
except IOError: except IOError:
raise bb.build.FuncFailed('Unable to open %s' % (cfile)) raise bb.build.FuncFailed('Unable to open %s' % (cfile))
# Beep the speaker and Clear the screen # Beep the speaker and Clear the screen
cfgfile.write('\x07\x0C') cfgfile.write('\x07\x0C')
# The title should be configurable # The title should be configurable
cfgfile.write('Linux Boot Menu\n') cfgfile.write('Linux Boot Menu\n')
cfgfile.write('The following targets are available on this image:\n') cfgfile.write('The following targets are available on this image:\n')
cfgfile.write('\n') cfgfile.write('\n')
for label in labels.split(): for label in labels.split():
from copy import deepcopy from copy import deepcopy
localdata = deepcopy(d) localdata = deepcopy(d)
overrides = localdata.getVar('OVERRIDES') overrides = localdata.getVar('OVERRIDES')
if not overrides: if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined') raise bb.build.FuncFailed('OVERRIDES not defined')
overrides = localdata.expand(overrides) overrides = localdata.expand(overrides)
localdata.setVar('OVERRIDES', label + ':' + overrides) localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata) bb.data.update_data(localdata)
usage = localdata.getVar('USAGE', True) usage = localdata.getVar('USAGE', True)
cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label)) cfgfile.write(' \x0F\x30\x3E%16s\x0F\x30\x37: ' % (label))
cfgfile.write('%s\n' % (usage)) cfgfile.write('%s\n' % (usage))
del localdata del localdata
cfgfile.write('\n') cfgfile.write('\n')
cfgfile.close() cfgfile.close()
} }
python build_syslinux_cfg () { python build_syslinux_cfg () {
import copy import copy
import sys import sys
workdir = d.getVar('WORKDIR', True) workdir = d.getVar('WORKDIR', True)
if not workdir: if not workdir:
bb.error("WORKDIR not defined, unable to package") bb.error("WORKDIR not defined, unable to package")
return return
labels = d.getVar('LABELS', True) labels = d.getVar('LABELS', True)
if not labels: if not labels:
bb.debug(1, "LABELS not defined, nothing to do") bb.debug(1, "LABELS not defined, nothing to do")
return return
if labels == []: if labels == []:
bb.debug(1, "No labels, nothing to do") bb.debug(1, "No labels, nothing to do")
return return
cfile = d.getVar('SYSLINUXCFG', True) cfile = d.getVar('SYSLINUXCFG', True)
if not cfile: if not cfile:
raise bb.build.FuncFailed('Unable to read SYSLINUXCFG') raise bb.build.FuncFailed('Unable to read SYSLINUXCFG')
try: try:
cfgfile = file(cfile, 'w') cfgfile = file(cfile, 'w')
except IOError: except IOError:
raise bb.build.FuncFailed('Unable to open %s' % (cfile)) raise bb.build.FuncFailed('Unable to open %s' % (cfile))
cfgfile.write('# Automatically created by OE\n') cfgfile.write('# Automatically created by OE\n')
opts = d.getVar('SYSLINUX_OPTS', True) opts = d.getVar('SYSLINUX_OPTS', True)
if opts: if opts:
for opt in opts.split(';'): for opt in opts.split(';'):
cfgfile.write('%s\n' % opt) cfgfile.write('%s\n' % opt)
cfgfile.write('ALLOWOPTIONS 1\n'); cfgfile.write('ALLOWOPTIONS 1\n');
cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) cfgfile.write('DEFAULT %s\n' % (labels.split()[0]))
timeout = d.getVar('SYSLINUX_TIMEOUT', True) timeout = d.getVar('SYSLINUX_TIMEOUT', True)
if timeout: if timeout:
cfgfile.write('TIMEOUT %s\n' % timeout) cfgfile.write('TIMEOUT %s\n' % timeout)
else: else:
cfgfile.write('TIMEOUT 50\n') cfgfile.write('TIMEOUT 50\n')
prompt = d.getVar('SYSLINUX_PROMPT', True) prompt = d.getVar('SYSLINUX_PROMPT', True)
if prompt: if prompt:
cfgfile.write('PROMPT %s\n' % prompt) cfgfile.write('PROMPT %s\n' % prompt)
else: else:
cfgfile.write('PROMPT 1\n') cfgfile.write('PROMPT 1\n')
menu = d.getVar('AUTO_SYSLINUXMENU', True) menu = d.getVar('AUTO_SYSLINUXMENU', True)
# This is ugly. My bad. # This is ugly. My bad.
if menu: if menu:
bb.build.exec_func('build_syslinux_menu', d) bb.build.exec_func('build_syslinux_menu', d)
mfile = d.getVar('SYSLINUXMENU', True) mfile = d.getVar('SYSLINUXMENU', True)
cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) ) cfgfile.write('DISPLAY %s\n' % (mfile.split('/')[-1]) )
for label in labels.split(): for label in labels.split():
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
overrides = localdata.getVar('OVERRIDES', True) overrides = localdata.getVar('OVERRIDES', True)
if not overrides: if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined') raise bb.build.FuncFailed('OVERRIDES not defined')
localdata.setVar('OVERRIDES', label + ':' + overrides) localdata.setVar('OVERRIDES', label + ':' + overrides)
bb.data.update_data(localdata) bb.data.update_data(localdata)
cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label)) cfgfile.write('LABEL %s\nKERNEL /vmlinuz\n' % (label))
append = localdata.getVar('APPEND', True) append = localdata.getVar('APPEND', True)
initrd = localdata.getVar('INITRD', True) initrd = localdata.getVar('INITRD', True)
if append: if append:
cfgfile.write('APPEND ') cfgfile.write('APPEND ')
if initrd: if initrd:
cfgfile.write('initrd=/initrd ') cfgfile.write('initrd=/initrd ')
cfgfile.write('LABEL=%s '% (label)) cfgfile.write('LABEL=%s '% (label))
cfgfile.write('%s\n' % (append)) cfgfile.write('%s\n' % (append))
cfgfile.close() cfgfile.close()
} }
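For LABELS = "boot install", no SYSLINUX_OPTS, no menu, and APPEND/INITRD set, the generated configuration would read roughly as follows (the console argument is an invented APPEND value):

# Automatically created by OE
ALLOWOPTIONS 1
DEFAULT boot
TIMEOUT 50
PROMPT 1
LABEL boot
KERNEL /vmlinuz
APPEND initrd=/initrd LABEL=boot console=ttyS0,115200
LABEL install
KERNEL /vmlinuz
APPEND initrd=/initrd LABEL=install console=ttyS0,115200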
@@ -113,13 +113,13 @@ def update_alternatives_after_parse(d):
# Convert old format to new format... # Convert old format to new format...
alt_links = d.getVar('ALTERNATIVE_LINKS', True) or "" alt_links = d.getVar('ALTERNATIVE_LINKS', True) or ""
for alt_link in alt_links.split(): for alt_link in alt_links.split():
alt_name = os.path.basename(alt_link) alt_name = os.path.basename(alt_link)
alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or "" alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
alternative += " " + alt_name alternative += " " + alt_name
d.setVar('ALTERNATIVE_%s' % pn, alternative) d.setVar('ALTERNATIVE_%s' % pn, alternative)
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link) d.setVarFlag('ALTERNATIVE_TARGET', alt_name, alt_link)
return return
if d.getVar('ALTERNATIVE_NAME') != None or d.getVar('ALTERNATIVE_PATH') != None: if d.getVar('ALTERNATIVE_NAME') != None or d.getVar('ALTERNATIVE_PATH') != None:
@@ -128,15 +128,15 @@ def update_alternatives_after_parse(d):
alt_path = d.getVar('ALTERNATIVE_PATH', True) alt_path = d.getVar('ALTERNATIVE_PATH', True)
alt_link = d.getVar('ALTERNATIVE_LINK', True) or ("%s/%s" % (d.getVar('bindir', True), alt_name)) alt_link = d.getVar('ALTERNATIVE_LINK', True) or ("%s/%s" % (d.getVar('bindir', True), alt_name))
if alt_name == None: if alt_name == None:
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE') raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % d.getVar('FILE')
if alt_path == None: if alt_path == None:
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE') raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % d.getVar('FILE')
alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or "" alternative = d.getVar('ALTERNATIVE_%s' % pn, True) or ""
alternative += " " + alt_name alternative += " " + alt_name
# Fix the alt_path if it's relative # Fix the alt_path if it's relative
alt_path = os.path.join(os.path.dirname(alt_link), alt_path) alt_path = os.path.join(os.path.dirname(alt_link), alt_path)
d.setVar('ALTERNATIVE_%s' % pn, alternative) d.setVar('ALTERNATIVE_%s' % pn, alternative)
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
@@ -199,144 +199,144 @@ populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}"
# the split and strip steps.. packagecopy seems to be the earliest reasonable # the split and strip steps.. packagecopy seems to be the earliest reasonable
# place. # place.
python perform_packagecopy_append () { python perform_packagecopy_append () {
# Check for deprecated usage... # Check for deprecated usage...
pn = d.getVar('BPN', True) pn = d.getVar('BPN', True)
if d.getVar('ALTERNATIVE_LINKS', True) != None: if d.getVar('ALTERNATIVE_LINKS', True) != None:
bb.warn('%s: Use of ALTERNATIVE_LINKS is deprecated, see update-alternatives.bbclass for more info.' % pn) bb.warn('%s: Use of ALTERNATIVE_LINKS is deprecated, see update-alternatives.bbclass for more info.' % pn)
if d.getVar('ALTERNATIVE_NAME', True) != None or d.getVar('ALTERNATIVE_PATH', True) != None: if d.getVar('ALTERNATIVE_NAME', True) != None or d.getVar('ALTERNATIVE_PATH', True) != None:
bb.warn('%s: Use of ALTERNATIVE_NAME is deprecated, see update-alternatives.bbclass for more info.' % pn) bb.warn('%s: Use of ALTERNATIVE_NAME is deprecated, see update-alternatives.bbclass for more info.' % pn)
# Do actual update alternatives processing # Do actual update alternatives processing
pkgdest = d.getVar('PKGD', True) pkgdest = d.getVar('PKGD', True)
for pkg in (d.getVar('PACKAGES', True) or "").split(): for pkg in (d.getVar('PACKAGES', True) or "").split():
# If the src == dest, we know we need to rename the dest by appending ${BPN} # If the src == dest, we know we need to rename the dest by appending ${BPN}
link_rename = {} link_rename = {}
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split(): for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True) alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
if not alt_link: if not alt_link:
alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name) alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name)
d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True) alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
# Sometimes alt_target is specified as relative to the link name. # Sometimes alt_target is specified as relative to the link name.
alt_target = os.path.join(os.path.dirname(alt_link), alt_target) alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
# If the link and target are the same name, we need to rename the target. # If the link and target are the same name, we need to rename the target.
if alt_link == alt_target: if alt_link == alt_target:
src = '%s/%s' % (pkgdest, alt_target) src = '%s/%s' % (pkgdest, alt_target)
alt_target_rename = '%s.%s' % (alt_target, pn) alt_target_rename = '%s.%s' % (alt_target, pn)
dest = '%s/%s' % (pkgdest, alt_target_rename) dest = '%s/%s' % (pkgdest, alt_target_rename)
if os.path.lexists(dest): if os.path.lexists(dest):
bb.note('%s: Already renamed: %s' % (pn, alt_target_rename)) bb.note('%s: Already renamed: %s' % (pn, alt_target_rename))
elif os.path.lexists(src): elif os.path.lexists(src):
if os.path.islink(src): if os.path.islink(src):
# Delay rename of links # Delay rename of links
link_rename[alt_target] = alt_target_rename link_rename[alt_target] = alt_target_rename
else: else:
bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename)) bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename))
os.rename(src, dest) os.rename(src, dest)
else: else:
bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename)) bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename))
continue continue
d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, alt_target_rename) d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, alt_target_rename)
# Process delayed link names # Process delayed link names
# Do these after other renames so we can correct broken links # Do these after other renames so we can correct broken links
for alt_target in link_rename: for alt_target in link_rename:
src = '%s/%s' % (pkgdest, alt_target) src = '%s/%s' % (pkgdest, alt_target)
dest = '%s/%s' % (pkgdest, link_rename[alt_target]) dest = '%s/%s' % (pkgdest, link_rename[alt_target])
link = os.readlink(src) link = os.readlink(src)
if os.path.isabs(link): if os.path.isabs(link):
link_target = pkgdest + os.readlink(src) link_target = pkgdest + os.readlink(src)
else: else:
link_target = os.path.join(os.path.dirname(src), link) link_target = os.path.join(os.path.dirname(src), link)
if os.path.lexists(link_target): if os.path.lexists(link_target):
# Ok, the link_target exists, we can rename # Ok, the link_target exists, we can rename
bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, link_rename[alt_target])) bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, link_rename[alt_target]))
os.rename(src, dest) os.rename(src, dest)
else: else:
# Try to resolve the broken link to link.${BPN} # Try to resolve the broken link to link.${BPN}
link_maybe = '%s.%s' % (os.readlink(src), pn) link_maybe = '%s.%s' % (os.readlink(src), pn)
if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)): if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)):
# Ok, the renamed link target exists.. create a new link, and remove the original # Ok, the renamed link target exists.. create a new link, and remove the original
bb.note('%s: Creating new link %s -> %s' % (pn, link_rename[alt_target], link_maybe)) bb.note('%s: Creating new link %s -> %s' % (pn, link_rename[alt_target], link_maybe))
os.symlink(link_maybe, dest) os.symlink(link_maybe, dest)
os.unlink(src) os.unlink(src)
else: else:
bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target)) bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target))
} }
python populate_packages_prepend () { python populate_packages_prepend () {
pn = d.getVar('BPN', True) pn = d.getVar('BPN', True)
# Do actual update alternatives processing # Do actual update alternatives processing
pkgdest = d.getVar('PKGD', True) pkgdest = d.getVar('PKGD', True)
for pkg in (d.getVar('PACKAGES', True) or "").split(): for pkg in (d.getVar('PACKAGES', True) or "").split():
# Create post install/removal scripts # Create post install/removal scripts
alt_setup_links = "" alt_setup_links = ""
alt_remove_links = "" alt_remove_links = ""
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split(): for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True) alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True) alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
# Sometimes alt_target is specified as relative to the link name. # Sometimes alt_target is specified as relative to the link name.
alt_target = os.path.join(os.path.dirname(alt_link), alt_target) alt_target = os.path.join(os.path.dirname(alt_link), alt_target)
alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True) alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True)
alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True) alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True)
# This shouldn't trigger, as it should have been resolved earlier! # This shouldn't trigger, as it should have been resolved earlier!
if alt_link == alt_target: if alt_link == alt_target:
bb.note('alt_link == alt_target: %s == %s -- correcting, this should not happen!' % (alt_link, alt_target)) bb.note('alt_link == alt_target: %s == %s -- correcting, this should not happen!' % (alt_link, alt_target))
alt_target = '%s.%s' % (alt_target, pn) alt_target = '%s.%s' % (alt_target, pn)
if not os.path.lexists('%s/%s' % (pkgdest, alt_target)): if not os.path.lexists('%s/%s' % (pkgdest, alt_target)):
bb.warn('%s: NOT adding alternative provide %s: %s does not exist' % (pn, alt_link, alt_target)) bb.warn('%s: NOT adding alternative provide %s: %s does not exist' % (pn, alt_link, alt_target))
continue continue
# Default to generate shell script.. eventually we may want to change this... # Default to generate shell script.. eventually we may want to change this...
alt_target = os.path.relpath(alt_target, os.path.dirname(alt_link)) alt_target = os.path.relpath(alt_target, os.path.dirname(alt_link))
alt_setup_links += ' update-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority) alt_setup_links += '\tupdate-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
alt_remove_links += ' update-alternatives --remove %s %s\n' % (alt_name, alt_target) alt_remove_links += '\tupdate-alternatives --remove %s %s\n' % (alt_name, alt_target)
if alt_setup_links: if alt_setup_links:
bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg) bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
bb.note('%s' % alt_setup_links) bb.note('%s' % alt_setup_links)
postinst = (d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)) or '#!/bin/sh\n' postinst = (d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True)) or '#!/bin/sh\n'
postinst += alt_setup_links postinst += alt_setup_links
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
bb.note('%s' % alt_remove_links) bb.note('%s' % alt_remove_links)
postrm = (d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)) or '#!/bin/sh\n' postrm = (d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True)) or '#!/bin/sh\n'
postrm += alt_remove_links postrm += alt_remove_links
d.setVar('pkg_postrm_%s' % pkg, postrm) d.setVar('pkg_postrm_%s' % pkg, postrm)
} }
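The generated scriptlets are plain shell appended to any existing postinst/postrm. For a hypothetical package whose 'vi' alternative points at /usr/bin/vim.tiny with priority 100, the postinst gains:

	update-alternatives --install /usr/bin/vi vi vim.tiny 100

and the postrm:

	update-alternatives --remove vi vim.tiny

The target is written relative to the link's directory, matching the os.path.relpath call above.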
python package_do_filedeps_append () { python package_do_filedeps_append () {
pn = d.getVar('BPN', True) pn = d.getVar('BPN', True)
pkgdest = d.getVar('PKGDEST', True) pkgdest = d.getVar('PKGDEST', True)
for pkg in packages.split(): for pkg in packages.split():
for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split(): for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split():
alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True) alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True)
alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True) alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True)
alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link
if alt_link == alt_target: if alt_link == alt_target:
bb.warn('alt_link == alt_target: %s == %s' % (alt_link, alt_target)) bb.warn('alt_link == alt_target: %s == %s' % (alt_link, alt_target))
alt_target = '%s.%s' % (alt_target, pn) alt_target = '%s.%s' % (alt_target, pn)
if not os.path.lexists('%s/%s/%s' % (pkgdest, pkg, alt_target)): if not os.path.lexists('%s/%s/%s' % (pkgdest, pkg, alt_target)):
continue continue
# Add file provide # Add file provide
trans_target = file_translate(alt_target) trans_target = file_translate(alt_target)
d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link) d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""): if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""):
d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target) d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)
} }
@@ -44,42 +44,42 @@ python __anonymous() {
} }
python populate_packages_prepend () { python populate_packages_prepend () {
def update_rcd_package(pkg): def update_rcd_package(pkg):
bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg) bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
localdata = bb.data.createCopy(d) localdata = bb.data.createCopy(d)
overrides = localdata.getVar("OVERRIDES", True) overrides = localdata.getVar("OVERRIDES", True)
localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides)) localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides))
bb.data.update_data(localdata) bb.data.update_data(localdata)
""" """
update-rc.d postinst is appended here because pkg_postinst may need to update-rc.d postinst is appended here because pkg_postinst may need to
execute on the target. Otherwise the update-rc.d postinst could be execute on the target. Otherwise the update-rc.d postinst could be
invoked twice, causing unwanted warnings. invoked twice, causing unwanted warnings.
""" """
postinst = localdata.getVar('pkg_postinst', True) postinst = localdata.getVar('pkg_postinst', True)
if not postinst: if not postinst:
postinst = '#!/bin/sh\n' postinst = '#!/bin/sh\n'
postinst += localdata.getVar('updatercd_postinst', True) postinst += localdata.getVar('updatercd_postinst', True)
d.setVar('pkg_postinst_%s' % pkg, postinst) d.setVar('pkg_postinst_%s' % pkg, postinst)
prerm = localdata.getVar('pkg_prerm', True) prerm = localdata.getVar('pkg_prerm', True)
if not prerm: if not prerm:
prerm = '#!/bin/sh\n' prerm = '#!/bin/sh\n'
prerm += localdata.getVar('updatercd_prerm', True) prerm += localdata.getVar('updatercd_prerm', True)
d.setVar('pkg_prerm_%s' % pkg, prerm) d.setVar('pkg_prerm_%s' % pkg, prerm)
postrm = localdata.getVar('pkg_postrm', True) postrm = localdata.getVar('pkg_postrm', True)
if not postrm: if not postrm:
postrm = '#!/bin/sh\n' postrm = '#!/bin/sh\n'
postrm += localdata.getVar('updatercd_postrm', True) postrm += localdata.getVar('updatercd_postrm', True)
d.setVar('pkg_postrm_%s' % pkg, postrm) d.setVar('pkg_postrm_%s' % pkg, postrm)
pkgs = d.getVar('INITSCRIPT_PACKAGES', True) pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
if pkgs == None: if pkgs == None:
pkgs = d.getVar('UPDATERCPN', True) pkgs = d.getVar('UPDATERCPN', True)
packages = (d.getVar('PACKAGES', True) or "").split() packages = (d.getVar('PACKAGES', True) or "").split()
if not pkgs in packages and packages != []: if not pkgs in packages and packages != []:
pkgs = packages[0] pkgs = packages[0]
for pkg in pkgs.split(): for pkg in pkgs.split():
update_rcd_package(pkg) update_rcd_package(pkg)
} }
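For illustration, a standalone sketch of how the postinst above is composed when the recipe defines no pkg_postinst of its own; the service name is hypothetical:

pkg_postinst = None
updatercd_postinst = 'update-rc.d $D myservice defaults 30\n'

postinst = pkg_postinst
if not postinst:
    postinst = '#!/bin/sh\n'
postinst += updatercd_postinst
print(postinst)     # a two-line script: "#!/bin/sh" then the update-rc.d call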


@@ -154,61 +154,61 @@ do_package_setscene[depends] = "${USERADDSETSCENEDEPS}"

# Recipe parse-time sanity checks
def update_useradd_after_parse(d):
    useradd_packages = d.getVar('USERADD_PACKAGES', True)

    if not useradd_packages:
        raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE')

    for pkg in useradd_packages.split():
        if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True):
            raise bb.build.FuncFailed, "%s inherits useradd but doesn't set USERADD_PARAM or GROUPADD_PARAM for package %s" % (d.getVar('FILE'), pkg)

python __anonymous() {
    update_useradd_after_parse(d)
}

# Return a single [GROUP|USER]ADD_PARAM formatted string which includes the
# [group|user]add parameters for all USERADD_PACKAGES in this recipe
def get_all_cmd_params(d, cmd_type):
    import string

    param_type = cmd_type.upper() + "ADD_PARAM_%s"
    params = []

    useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
    for pkg in useradd_packages.split():
        param = d.getVar(param_type % pkg, True)
        if param:
            params.append(param)

    return string.join(params, "; ")

# Adds the preinst script into generated packages
fakeroot python populate_packages_prepend () {
    def update_useradd_package(pkg):
        bb.debug(1, 'adding user/group calls to preinst for %s' % pkg)

        """
        useradd preinst is appended here because pkg_preinst may be
        required to execute on the target. Not doing so may cause
        useradd preinst to be invoked twice, causing unwanted warnings.
        """
        preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True)
        if not preinst:
            preinst = '#!/bin/sh\n'
        preinst += d.getVar('useradd_preinst', True)
        d.setVar('pkg_preinst_%s' % pkg, preinst)

        # RDEPENDS setup
        rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or ""
        rdepends += ' ' + d.getVar('MLPREFIX') + 'base-passwd'
        rdepends += ' ' + d.getVar('MLPREFIX') + 'shadow'
        d.setVar("RDEPENDS_%s" % pkg, rdepends)

    # Add the user/group preinstall scripts and RDEPENDS requirements
    # to packages specified by USERADD_PACKAGES
    if not bb.data.inherits_class('nativesdk', d):
        useradd_packages = d.getVar('USERADD_PACKAGES', True) or ""
        for pkg in useradd_packages.split():
            update_useradd_package(pkg)
}
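For illustration, a standalone sketch of the "; "-joining done by get_all_cmd_params() above; the package names and useradd parameters are made up, and a dict stands in for the datastore:

params = {
    'USERADD_PARAM_foo': '-u 1200 -s /bin/sh foo',
    'USERADD_PARAM_bar': '-u 1201 -s /bin/sh bar',
}
useradd_packages = 'foo bar'
print('; '.join(params['USERADD_PARAM_%s' % pkg]
                for pkg in useradd_packages.split()))
# -u 1200 -s /bin/sh foo; -u 1201 -s /bin/sh bar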


@@ -1,13 +1,13 @@
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
    import sys
    # emit variables and shell functions
    #bb.data.emit_env(sys.__stdout__, d)
    # emit the metadata which isn't valid shell
    for e in d.keys():
        if d.getVarFlag(e, 'task'):
            bb.plain("%s" % e)
}

CLEANFUNCS ?= ""

@@ -15,34 +15,34 @@ CLEANFUNCS ?= ""
addtask clean
do_clean[nostamp] = "1"
python do_clean() {
    """clear the build and temp directories"""
    dir = d.expand("${WORKDIR}")
    bb.note("Removing " + dir)
    oe.path.remove(dir)

    dir = "%s.*" % bb.data.expand(d.getVar('STAMP'), d)
    bb.note("Removing " + dir)
    oe.path.remove(dir)

    for f in (d.getVar('CLEANFUNCS', True) or '').split():
        bb.build.exec_func(f, d)
}

addtask checkuri
do_checkuri[nostamp] = "1"
python do_checkuri() {
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)

    try:
        fetcher = bb.fetch2.Fetch(src_uri, localdata)
        fetcher.checkstatus()
    except bb.fetch2.BBFetchException, e:
        raise bb.build.FuncFailed(e)
}

addtask checkuriall after do_checkuri


@@ -292,77 +292,77 @@ END
}

def check_app_exists(app, d):
    from bb import which, data

    app = data.expand(app, d)
    path = data.getVar('PATH', d, 1)
    return bool(which(path, app))

def explode_deps(s):
    return bb.utils.explode_deps(s)

def base_set_filespath(path, d):
    filespath = []
    extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "")
    # Don't prepend empty strings to the path list
    if extrapaths != "":
        path = extrapaths.split(":") + path
    # The ":" ensures we have an 'empty' override
    overrides = (d.getVar("OVERRIDES", True) or "") + ":"
    for p in path:
        if p != "":
            for o in overrides.split(":"):
                filespath.append(os.path.join(p, o))
    return ":".join(filespath)

def extend_variants(d, var, extend, delim=':'):
    """Return a string of all bb class extend variants for the given extend"""
    variants = []
    whole = d.getVar(var, True) or ""
    for ext in whole.split():
        eext = ext.split(delim)
        if len(eext) > 1 and eext[0] == extend:
            variants.append(eext[1])
    return " ".join(variants)

def multilib_pkg_extend(d, pkg):
    variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split()
    if not variants:
        return pkg
    pkgs = pkg
    for v in variants:
        pkgs = pkgs + " " + v + "-" + pkg
    return pkgs

def all_multilib_tune_values(d, var, unique = True, need_split = True, delim = ' '):
    """Return a string of all ${var} in all multilib tune configuration"""
    values = []
    value = d.getVar(var, True) or ""
    if value != "":
        if need_split:
            for item in value.split(delim):
                values.append(item)
        else:
            values.append(value)
    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)
        value = localdata.getVar(var, True) or ""
        if value != "":
            if need_split:
                for item in value.split(delim):
                    values.append(item)
            else:
                values.append(value)
    if unique:
        #we do this to keep order as much as possible
        ret = []
        for value in values:
            if not value in ret:
                ret.append(value)
    else:
        ret = values
    return " ".join(ret)
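For illustration, a standalone sketch of the FILESPATH construction in base_set_filespath() above; the search directories and OVERRIDES value are hypothetical:

import os

path = ['meta/recipes-core/busybox/busybox-1.20', 'meta/recipes-core/busybox/files']
overrides = 'arm:qemuarm' + ':'       # trailing ":" gives the 'empty' override
filespath = []
for p in path:
    if p != "":
        for o in overrides.split(":"):
            filespath.append(os.path.join(p, o))
print(":".join(filespath))
# each directory expands to .../arm, .../qemuarm and .../ (the bare directory)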


@@ -96,20 +96,19 @@ RDEPENDS_${PN} = "\
PACKAGES_DYNAMIC = "${PN}-plugin-*"

python populate_packages_prepend() {
    depmap = dict(pppd="ppp")
    packages = []
    multilib_prefix = (d.getVar("MLPREFIX", True) or "")
    hook = lambda file,pkg,b,c,d:packages.append((file,pkg))
    plugin_dir = d.expand('${libdir}/connman/plugins/')
    plugin_name = d.expand('${PN}-plugin-%s')
    do_split_packages(d, plugin_dir, '^(.*).so$', plugin_name, '${PN} plugin for %s', extra_depends='', hook=hook, prepend=True )
    for (file, package) in packages:
        plugintype = package.split( '-' )[-1]
        if plugintype in depmap:
            rdepends = map(lambda x: multilib_prefix + x, depmap[plugintype].split())
            bb.note( "Adding rdependency on %s to %s" % ( rdepends, package ) )
            d.setVar("RDEPENDS_%s" % package, " ".join(rdepends))
}

PACKAGES =+ "${PN}-tools ${PN}-tests"


@@ -52,30 +52,30 @@ base_passwd_sstate_postinst() {
}

python populate_packages_prepend() {
    # Add in the preinst function for ${PN}
    # We have to do this here as prior to this, passwd/group.master
    # would be unavailable. We need to create these files at preinst
    # time before the files from the package may be available, hence
    # storing the data from the files in the preinst directly.

    f = open(d.expand("${STAGING_DATADIR}/base-passwd/passwd.master"), 'r')
    passwd = "".join(f.readlines())
    f.close()
    f = open(d.expand("${STAGING_DATADIR}/base-passwd/group.master"), 'r')
    group = "".join(f.readlines())
    f.close()

    preinst = """#!/bin/sh
if [ ! -e $D${sysconfdir}/passwd ]; then
\tcat << EOF > $D${sysconfdir}/passwd
""" + passwd + """EOF
fi
if [ ! -e $D${sysconfdir}/group ]; then
\tcat << EOF > $D${sysconfdir}/group
""" + group + """EOF
fi
"""
    d.setVar('pkg_preinst_${PN}', preinst)
}

addtask do_package after do_populate_sysroot
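For illustration, a standalone sketch of the preinst generation above, built from a made-up one-line passwd file; printing it shows the shell script that lands in pkg_preinst:

passwd = 'root::0:0:root:/root:/bin/sh\n'
preinst = '#!/bin/sh\n'
preinst += 'if [ ! -e $D/etc/passwd ]; then\n'
preinst += '\tcat << EOF > $D/etc/passwd\n'   # the \t becomes a literal tab
preinst += passwd
preinst += 'EOF\nfi\n'
print(preinst)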


@@ -218,23 +218,23 @@ ALTERNATIVE_TARGET[syslog-startup-conf] = "${sysconfdir}/syslog-startup.conf.${B
ALTERNATIVE_TARGET = "/bin/busybox"

python do_package_prepend () {
    # We need to load the full set of busybox provides from the /etc/busybox.links
    # Use this to seed update-alternatives with the right information

    dvar = d.getVar('D', True)
    pn = d.getVar('PN', True)
    f = open('%s/etc/busybox.links' % (dvar), 'r')

    for alt_link_name in f:
        alt_link_name = alt_link_name.strip()
        alt_name = os.path.basename(alt_link_name)

        # Match coreutils
        if alt_name == '[':
            alt_name = 'lbracket'

        d.appendVar('ALTERNATIVE_%s' % (pn), ' ' + alt_name)
        d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link_name)
}
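For illustration, a standalone sketch of the busybox.links parsing above, with an in-memory list in place of the real /etc/busybox.links file:

import os

links = ['/bin/sed', '/usr/bin/[', '/bin/cat']   # hypothetical link names
for alt_link_name in links:
    alt_name = os.path.basename(alt_link_name.strip())
    if alt_name == '[':          # '[' needs a name usable in a variable key
        alt_name = 'lbracket'
    print(alt_name, '->', alt_link_name)
# sed -> /bin/sed, lbracket -> /usr/bin/[, cat -> /bin/cat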
pkg_postinst_${PN} () {


@@ -20,7 +20,7 @@ SRC_URI = "svn://www.eglibc.org/svn/branches/;module=${EGLIBC_BRANCH};protocol=h
S = "${WORKDIR}/${EGLIBC_BRANCH}/localedef"

do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}

do_move_ports() {


@@ -20,7 +20,7 @@ SRC_URI = "svn://www.eglibc.org/svn/branches/;module=${EGLIBC_BRANCH};protocol=h
S = "${WORKDIR}/${EGLIBC_BRANCH}/localedef"

do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}

do_move_ports() {


@@ -1,54 +1,54 @@
def ld_append_if_tune_exists(d, infos, dict):
    tune = d.getVar("DEFAULTTUNE", True) or ""
    libdir = d.getVar("base_libdir", True) or ""
    if dict.has_key(tune):
        infos['ldconfig'].add('{"' + libdir + '/' + dict[tune][0] + '",' + dict[tune][1] + ' }')
        infos['lddrewrite'].add(libdir+'/'+dict[tune][0])

def eglibc_dl_info(d):
    ld_info_all = {
        "mips": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mipsel": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "mips64el-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "powerpc": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "powerpc-nf": ["ld.so.1", "FLAG_ELF_LIBC6"],
        "powerpc64": ["ld64.so.1", "FLAG_ELF_LIBC6"],
        "powerpc64-nf": ["ld64.so.1", "FLAG_ELF_LIBC6"],
        "core2": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
        "core2-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
        "x86": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
        "x86-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
        "i586": ["ld-linux.so.2", "FLAG_ELF_LIBC6"],
    }

    infos = {'ldconfig':set(), 'lddrewrite':set()}
    ld_append_if_tune_exists(d, infos, ld_info_all)

    #DEFAULTTUNE_MULTILIB_ORIGINAL
    original_tune=d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL",True)
    if original_tune:
        localdata = bb.data.createCopy(d)
        localdata.setVar("DEFAULTTUNE", original_tune)
        ld_append_if_tune_exists(localdata, infos, ld_info_all)

    variants = d.getVar("MULTILIB_VARIANTS", True) or ""
    for item in variants.split():
        localdata = bb.data.createCopy(d)
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
        localdata.setVar("OVERRIDES", overrides)
        bb.data.update_data(localdata)
        ld_append_if_tune_exists(localdata, infos, ld_info_all)
    infos['ldconfig'] = ','.join(infos['ldconfig'])
    infos['lddrewrite'] = ' '.join(infos['lddrewrite'])
    return infos

EGLIBC_KNOWN_INTERPRETER_NAMES = "${@eglibc_dl_info(d)['ldconfig']}"
RTLDLIST = "${@eglibc_dl_info(d)['lddrewrite']}"
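For illustration, a standalone sketch of ld_append_if_tune_exists() for a single tune; the table is trimmed to one entry, the libdir value is hypothetical, and a plain membership test replaces the has_key() call:

ld_info_all = {"core2-64": ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"]}
tune = "core2-64"
libdir = "/lib"
infos = {'ldconfig': set(), 'lddrewrite': set()}
if tune in ld_info_all:
    infos['ldconfig'].add('{"' + libdir + '/' + ld_info_all[tune][0] + '",' + ld_info_all[tune][1] + ' }')
    infos['lddrewrite'].add(libdir + '/' + ld_info_all[tune][0])
print(','.join(infos['ldconfig']))    # {"/lib/ld-linux-x86-64.so.2",FLAG_ELF_LIBC6 }
print(' '.join(infos['lddrewrite']))  # /lib/ld-linux-x86-64.so.2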


@@ -1,14 +1,14 @@
def eglibc_cfg(feature, features, tokens, cnf):
    if type(tokens) == type(""):
        tokens = [tokens]
    if type(features) == type([]) and feature in features:
        cnf.extend([token + ' = y' for token in tokens])
    else:
        for token in tokens:
            cnf.extend([token + ' = n'])
            if token == 'OPTION_EGLIBC_NSSWITCH':
                cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG = ${S}/nss/nsswitch.conf"])
                cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS = ${S}/nss/fixed-nsswitch.functions"])

# arrange the dependencies among eglibc configurable options according to file option-groups.def from the eglibc source code
def distro_features_check_deps(distro_features):


@@ -78,7 +78,7 @@ EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"

do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}

do_move_ports() {

@@ -89,8 +89,8 @@ do_move_ports() {
}

do_patch_append() {
    bb.build.exec_func('do_fix_ia_headers', d)
    bb.build.exec_func('do_fix_readlib_c', d)
}

# for mips eglibc now builds syscall tables for all abi's


@@ -76,7 +76,7 @@ EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \
EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}"

do_unpack_append() {
    bb.build.exec_func('do_move_ports', d)
}

do_move_ports() {

@@ -87,7 +87,7 @@ do_move_ports() {
}

do_patch_append() {
    bb.build.exec_func('do_fix_readlib_c', d)
}

# for mips eglibc now builds syscall tables for all abi's


@@ -38,9 +38,9 @@ export STAGING_INCDIR
export LDFLAGS += "-ldl"

python populate_packages_prepend () {
    # autonamer would call this libxml2-2, but we don't want that
    if d.getVar('DEBIAN_NAMES', True):
        d.setVar('PKG_libxml2', '${MLPREFIX}libxml2')
}

PACKAGES += "${PN}-utils"


@@ -119,8 +119,8 @@ _install_cfgs = "\
"

python do_install () {
    bb.build.exec_func("shell_do_install", d)
    oe.path.make_relative_symlink(d.expand("${D}${libdir}/libtinfo.so"))
}

shell_do_install() {

@@ -205,12 +205,12 @@ shell_do_install() {
}

python populate_packages_prepend () {
    libdir = d.expand("${libdir}")
    base_libdir = d.expand("${base_libdir}")
    pnbase = d.expand("${PN}-lib%s")
    do_split_packages(d, libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends = '', allow_links=True)
    if libdir != base_libdir:
        do_split_packages(d, base_libdir, '^lib(.*)\.so\..*', pnbase, 'ncurses %s library', prepend=True, extra_depends = '', allow_links=True)
}


@@ -8,26 +8,26 @@ USE_NLS = "yes"
SRC_URI += "file://db_linking_hack.patch"

python do_install () {
    bb.build.exec_func('do_install_base', d)
    bb.build.exec_func('do_install_config', d)
}

python do_install_config () {
    indir = os.path.dirname(d.getVar('FILE',1))
    infile = file(os.path.join(indir, 'files', 'apt.conf'), 'r')
    data = infile.read()
    infile.close()

    data = d.expand(data)

    outdir = os.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt')
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    outpath = os.path.join(outdir, 'apt.conf.sample')

    outfile = file(outpath, 'w')
    outfile.write(data)
    outfile.close()
}

do_install_base () {


@@ -34,23 +34,23 @@ apt-utils-manpages="doc/apt-extracttemplates.1 \
# doc/apt-ftparchive.1

def get_files_apt_doc(d, bb, manpages):
    import re
    manpages = re.sub(r'\bdoc/(\S+)/(\S+)\.\1\.(.)\b', r'${mandir}/\1/man\3/\2.\3', manpages)
    manpages = re.sub(r'\bdoc/(\S+)\.(.)\b', r'${mandir}/man\2/\1.\2', manpages)
    return manpages

def get_commands_apt_doc(d, bb, manpages):
    import os
    s = list()
    __dir_cache__ = list()
    for m in manpages.split():
        dest = get_files_apt_doc(d, bb, m)
        dir = os.path.dirname(dest)
        if not dir in __dir_cache__:
            s.append("install -d ${D}/%s" % dir)
            __dir_cache__.append(dir)
        s.append("install -m 0644 %s ${D}/%s" % (m, dest))
    return "\n".join(s)
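For illustration, a runnable sketch of the two rewrites in get_files_apt_doc(); the first input comes from the manpage list above, the localized second one is hypothetical:

import re

for m in ['doc/apt-extracttemplates.1', 'doc/fr/apt-get.fr.8']:
    dest = re.sub(r'\bdoc/(\S+)/(\S+)\.\1\.(.)\b', r'${mandir}/\1/man\3/\2.\3', m)
    dest = re.sub(r'\bdoc/(\S+)\.(.)\b', r'${mandir}/man\2/\1.\2', dest)
    print(m, '->', dest)
# doc/apt-extracttemplates.1 -> ${mandir}/man1/apt-extracttemplates.1
# doc/fr/apt-get.fr.8 -> ${mandir}/fr/man8/apt-get.8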
PACKAGES += "${PN}-utils ${PN}-utils-doc"
FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \


@@ -294,11 +294,11 @@ PACKAGES_append = " perl-modules "
RRECOMMENDS_perl-modules = "${@d.getVar('PACKAGES', True).replace('${PN}-modules ', '').replace('${PN}-dbg ', '').replace('${PN}-misc ', '').replace('${PN}-dev ', '').replace('${PN}-pod ', '').replace('${PN}-doc ', '')}"

python populate_packages_prepend () {
    libdir = d.expand('${libdir}/perl/${PV}')
    do_split_packages(d, libdir, 'auto/(Encode/.[^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, 'auto/([^/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, 'Module/([^\/]*).*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
    do_split_packages(d, libdir, '(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|auto\/)[^\/]).*)\.(pm|pl|e2x)', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False)
}

PACKAGES_DYNAMIC = "perl-module-*"


@@ -9,10 +9,10 @@ def get_qemu_target_list(d):
    for arch in ['mips64', 'mips64el', 'ppcemb']:
        if arch in archs:
            targets += arch + "-softmmu,"
            archs.remove(arch)
    for arch in ['armeb', 'alpha', 'ppc64abi32', 'sparc32plus']:
        if arch in archs:
            targets += arch + "-linux-user,"
            archs.remove(arch)
    return targets + ''.join([arch + "-linux-user" + "," + arch + "-softmmu" + "," for arch in archs]).rstrip(',')
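For illustration, a standalone sketch of the target-string assembly above for a hypothetical QEMU_TARGETS of "arm mips64":

targets = ""
archs = ['arm', 'mips64']
for arch in ['mips64', 'mips64el', 'ppcemb']:
    if arch in archs:
        targets += arch + "-softmmu,"      # these get no linux-user target
        archs.remove(arch)
for arch in ['armeb', 'alpha', 'ppc64abi32', 'sparc32plus']:
    if arch in archs:
        targets += arch + "-linux-user,"   # these get no softmmu target
        archs.remove(arch)
print(targets + ''.join([a + "-linux-user," + a + "-softmmu," for a in archs]).rstrip(','))
# mips64-softmmu,arm-linux-user,arm-softmmu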


@@ -58,10 +58,10 @@ fakeroot do_install () {
}

python do_package_append() {
    import subprocess
    # Change permissions back the way they were, they probably had a reason...
    workdir = d.getVar('WORKDIR', True)
    subprocess.call('chmod 0511 %s/install/cups/var/run/cups/certs' % workdir, shell=True)
}

PACKAGES =+ "${PN}-lib ${PN}-libimage"


@@ -59,6 +59,6 @@ CONFFILES_${PN} = "${sysconfdir}/lighttpd.conf"
PACKAGES_DYNAMIC = "lighttpd-module-*"

python populate_packages_prepend () {
    lighttpd_libdir = d.expand('${libdir}')
    do_split_packages(d, lighttpd_libdir, '^mod_(.*)\.so$', 'lighttpd-module-%s', 'Lighttpd module for %s', extra_depends='')
}


@@ -49,7 +49,7 @@ FILES_${PN} += "/opt/ltp/* /opt/ltp/runtest/* /opt/ltp/scenario_groups/* /opt/lt
TARGET_CC_ARCH += "${LDFLAGS}"

do_unpack_append() {
    bb.build.exec_func('do_extract_tarball', d)
}

do_extract_tarball() {


@@ -46,8 +46,8 @@ do_unpack[cleandirs] += "${S}"
# We invoke the base do_patch at the end, to incorporate any local patches
python do_patch() {
    bb.build.exec_func('nettools_do_patch', d)
    bb.build.exec_func('patch_do_patch', d)
}

do_configure() {


@@ -53,28 +53,28 @@ RDEPENDS_${PN}-xtests = "libpam pam-plugin-access pam-plugin-debug pam-plugin-cr
RRECOMMENDS_${PN} = "libpam-runtime"

python populate_packages_prepend () {
    import os.path

    def pam_plugin_append_file(pn, dir, file):
        nf = os.path.join(dir, file)
        of = d.getVar('FILES_' + pn, True)
        if of:
            nf = of + " " + nf
        d.setVar('FILES_' + pn, nf)

    dvar = bb.data.expand('${WORKDIR}/package', d, True)
    pam_libdir = d.expand('${base_libdir}/security')
    pam_sbindir = d.expand('${sbindir}')
    pam_filterdir = d.expand('${base_libdir}/security/pam_filter')

    do_split_packages(d, pam_libdir, '^pam(.*)\.so$', 'pam-plugin%s', 'PAM plugin for %s', extra_depends='')
    pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_chkpwd')
    pam_plugin_append_file('pam-plugin-unix', pam_sbindir, 'unix_update')
    pam_plugin_append_file('pam-plugin-tally', pam_sbindir, 'pam_tally')
    pam_plugin_append_file('pam-plugin-tally2', pam_sbindir, 'pam_tally2')
    pam_plugin_append_file('pam-plugin-timestamp', pam_sbindir, 'pam_timestamp_check')
    pam_plugin_append_file('pam-plugin-mkhomedir', pam_sbindir, 'mkhomedir_helper')
    do_split_packages(d, pam_filterdir, '^(.*)$', 'pam-filter-%s', 'PAM filter for %s', extra_depends='')
}
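For illustration, a standalone sketch of pam_plugin_append_file() above; a plain dict stands in for the datastore and the paths are hypothetical:

import os

files = {'FILES_pam-plugin-unix': '/lib/security/pam_unix.so'}

def pam_plugin_append_file(pn, dir, file):
    nf = os.path.join(dir, file)
    of = files.get('FILES_' + pn)
    if of:
        nf = of + " " + nf
    files['FILES_' + pn] = nf

pam_plugin_append_file('pam-plugin-unix', '/usr/sbin', 'unix_chkpwd')
print(files['FILES_pam-plugin-unix'])
# /lib/security/pam_unix.so /usr/sbin/unix_chkpwd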
do_install() {


@@ -68,11 +68,11 @@ PACKAGES_DYNAMIC += "gdk-pixbuf-loader-*"
PACKAGES_DYNAMIC_virtclass-native = ""

python populate_packages_prepend () {
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)

    loaders_root = d.expand('${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders')

    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
}

do_install_append_virtclass-native() {


@@ -32,20 +32,20 @@ LIBV = "2.10.0"
PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"

python populate_packages_prepend () {
    import os.path

    prologue = d.getVar("postinst_prologue", True)
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)

    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    loaders_root = os.path.join(gtk_libdir, 'loaders')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends')

    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')

    if (d.getVar('DEBIAN_NAMES', True)):
        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}


@@ -32,20 +32,20 @@ LIBV = "2.10.0"
PACKAGES_DYNAMIC += "gdk-pixbuf-loader-* gtk-immodule-* gtk-printbackend-*"

python populate_packages_prepend () {
    import os.path

    prologue = d.getVar("postinst_prologue", True)
    postinst_pixbufloader = d.getVar("postinst_pixbufloader", True)

    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    loaders_root = os.path.join(gtk_libdir, 'loaders')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends')

    do_split_packages(d, loaders_root, '^libpixbufloader-(.*)\.so$', 'gdk-pixbuf-loader-%s', 'GDK pixbuf loader for %s', postinst_pixbufloader)
    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')

    if (d.getVar('DEBIAN_NAMES', True)):
        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}


@@ -39,17 +39,17 @@ LIBV = "2.10.0"
PACKAGES_DYNAMIC += "gtk-immodule-* gtk-printbackend-*"

python populate_packages_prepend () {
    import os.path

    prologue = d.getVar("postinst_prologue", True)

    gtk_libdir = d.expand('${libdir}/gtk-2.0/${LIBV}')
    immodules_root = os.path.join(gtk_libdir, 'immodules')
    printmodules_root = os.path.join(gtk_libdir, 'printbackends')

    do_split_packages(d, immodules_root, '^im-(.*)\.so$', 'gtk-immodule-%s', 'GTK input module for %s', prologue + 'gtk-query-immodules-2.0 > /etc/gtk-2.0/gtk.immodules')
    do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk-printbackend-%s', 'GTK printbackend module for %s')

    if (d.getVar('DEBIAN_NAMES', True)):
        d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-2.0')
}


@@ -29,14 +29,14 @@ CFLAGS_prepend = "-DHAVE_ANIMATION "
inherit gnome

python populate_packages_prepend() {
    import os.path

    engines_root = os.path.join(d.getVar('libdir', True), "gtk-2.0/2.10.0/engines")
    themes_root = os.path.join(d.getVar('datadir', True), "themes")

    do_split_packages(d, engines_root, '^lib(.*)\.so$', 'gtk-engine-%s', 'GTK %s theme engine', extra_depends='')
    do_split_packages(d, themes_root, '(.*)', 'gtk-theme-%s', 'GTK theme %s', allow_dirs=True, extra_depends='')
    # TODO: mark theme packages as arch all
}

SRC_URI += "file://glib-2.32.patch"


@@ -1,6 +1,6 @@
def get_cairo_fpu_setting(bb, d):
    if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
        return "--disable-some-floating-point"
    return ""


@@ -1,6 +1,6 @@
def get_clutter_fpu_setting(bb, d):
    if d.getVar('TARGET_FPU', True) in [ 'soft' ]:
        return "--without-fpu"
    return ""


@@ -11,11 +11,11 @@ DRIDRIVERS_append_x86-64 = ",i915,i965"
EXTRA_OECONF += "--with-driver=dri --disable-egl --disable-gallium --without-gallium-drivers --with-dri-drivers=${DRIDRIVERS}"

python populate_packages_prepend() {
    import os.path

    dri_drivers_root = os.path.join(d.getVar('libdir', True), "dri")

    do_split_packages(d, dri_drivers_root, '^(.*)_dri\.so$', 'mesa-dri-driver-%s', 'Mesa %s DRI driver', extra_depends='')
}

PACKAGES_DYNAMIC = "mesa-dri-driver-*"


@@ -61,11 +61,11 @@ do_install_append () {
python populate_packages_prepend () {
    prologue = d.getVar("postinst_prologue", True)

    modules_root = d.expand('${libdir}/pango/${LIBV}/modules')

    do_split_packages(d, modules_root, '^pango-(.*)\.so$', 'pango-module-%s', 'Pango module %s', prologue + '${bindir}/${MLPREFIX}pango-querymodules > /etc/pango/${MLPREFIX}pango.modules')
}

FILES_${PN} = "${sysconfdir}/pango/* ${bindir}/* ${libdir}/libpango*${SOLIBS}"


@@ -18,5 +18,5 @@ def perf_feature_enabled(feature, trueval, falseval, d):
    """
    enabled_features = d.getVar("PERF_FEATURES_ENABLE", True) or ""
    if feature in enabled_features:
        return trueval
    return falseval


@@ -1,27 +1,27 @@
LIBV = "0.10"

python populate_packages_prepend () {
    gst_libdir = d.expand('${libdir}/gstreamer-${LIBV}')
    postinst = d.getVar('plugin_postinst', True)
    glibdir = d.getVar('libdir', True)

    do_split_packages(d, glibdir, '^lib(.*)\.so\.*', 'lib%s', 'gstreamer %s library', extra_depends='', allow_links=True)
    do_split_packages(d, gst_libdir, 'libgst(.*)\.so$', d.expand('${PN}-%s'), 'GStreamer plugin for %s', postinst=postinst, extra_depends=d.expand('${PN}'))
    do_split_packages(d, gst_libdir, 'libgst(.*)\.la$', d.expand('${PN}-%s-dev'), 'GStreamer plugin for %s (development files)', extra_depends=d.expand('${PN}-dev'))
    do_split_packages(d, gst_libdir, 'libgst(.*)\.a$', d.expand('${PN}-%s-staticdev'), 'GStreamer plugin for %s (static development files)', extra_depends=d.expand('${PN}-staticdev'))

    pn = d.getVar('PN', True)
    metapkg = pn + '-meta'
    d.setVar('ALLOW_EMPTY_' + metapkg, "1")
    d.setVar('FILES_' + metapkg, "")
    blacklist = [ pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc' ]
    metapkg_rdepends = []
    packages = d.getVar('PACKAGES', True).split()
    for pkg in packages[1:]:
        if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'):
            metapkg_rdepends.append(pkg)
    d.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends))
    d.setVar('DESCRIPTION_' + metapkg, pn + ' meta package')
}

ALLOW_EMPTY = "1"
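For illustration, a standalone sketch of the meta-package filtering above, with a hypothetical package list; everything except dev/dbg/staticdev/locale packages becomes an RDEPENDS of the -meta package:

pn = 'gst-plugins-base'
blacklist = [pn + '-locale', pn + '-dev', pn + '-dbg', pn + '-doc']
packages = [pn, pn + '-alsa', pn + '-alsa-dev', pn + '-alsa-staticdev', pn + '-dbg']
metapkg_rdepends = []
for pkg in packages[1:]:
    if not pkg in blacklist and not pkg in metapkg_rdepends and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.count('locale') and not pkg.count('-staticdev'):
        metapkg_rdepends.append(pkg)
print(' '.join(metapkg_rdepends))
# gst-plugins-base-alsa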


@@ -74,10 +74,10 @@ FILES_libpulse = "${libdir}/libpulse.so.*"
FILES_libpulse-simple = "${libdir}/libpulse-simple.so.*"
FILES_libpulse-browse = "${libdir}/libpulse-browse.so.*"
FILES_libpulse-mainloop-glib = "${libdir}/libpulse-mainloop-glib.so.*"
FILES_${PN}-dbg += "${libexecdir}/pulse/.debug \
                    ${libdir}/pulse-${PV}/modules/.debug"
FILES_${PN}-dev += "${libdir}/pulse-${PV}/modules/*.la ${datadir}/vala ${libdir}/cmake"
FILES_${PN}-conf = "${sysconfdir}"
FILES_${PN}-bin += "${sysconfdir}/default/volatiles/volatiles.04_pulse"
FILES_${PN}-server = "${bindir}/pulseaudio ${bindir}/start-* ${sysconfdir} ${bindir}/pactl ${base_libdir}/udev/rules.d/*.rules"

@@ -105,11 +105,11 @@ pkg_postinst_${PN}-server() {
}

python populate_packages_prepend() {
    #d.setVar('PKG_pulseaudio', 'pulseaudio')

    plugindir = d.expand('${libdir}/pulse-${PV}/modules/')
    do_split_packages(d, plugindir, '^module-(.*)\.so$', 'pulseaudio-module-%s', 'PulseAudio module for %s', extra_depends='' )
    do_split_packages(d, plugindir, '^lib(.*)\.so$', 'pulseaudio-lib-%s', 'PulseAudio library for %s', extra_depends='' )
}

RDEPENDS_pulseaudio-module-console-kit =+ "consolekit"


@@ -30,23 +30,23 @@ PACKAGES_DYNAMIC = "qmmp-plugin-* "

python populate_packages_prepend () {
    import os

    qmmp_libdir = d.expand('${libdir}/qmmp')
    gd = d.expand('${D}/${libdir}/qmmp')
    plug_dirs = os.listdir(gd)

    for plug_dir in plug_dirs:
        g_plug_dir = os.path.join(qmmp_libdir, plug_dir)
        do_split_packages(d, g_plug_dir, '^lib(.*)\.so$', 'qmmp-plugin-' + plug_dir.lower() + '-%s', 'Qmmp ' + plug_dir + ' plugin for %s')
}

FILES_${PN} = "\
    ${bindir}/qmmp \
    ${libdir}/lib*${SOLIBS} \
    ${datadir}/icons/* \
    ${datadir}/qmmp/images/* \
    ${datadir}/applications/* \
    "

FILES_${PN}-dbg += "\
    ${libdir}/qmmp/*/.debug/* \


@@ -274,59 +274,59 @@ do_compile() {
}

python populate_packages_prepend() {
    translation_dir = d.expand('${datadir}/${QT_DIR_NAME}/translations/')
    translation_name = d.expand('${QT_BASE_NAME}-translation-%s')
    do_split_packages(d, translation_dir, '^(assistant|designer|linguist|qt|qtconfig|qvfb)_(.*)\.qm$', translation_name, '${PN} translation for %s', extra_depends='' )

    phrasebook_dir = d.expand('${datadir}/${QT_DIR_NAME}/phrasebooks/')
    phrasebook_name = d.expand('${QT_BASE_NAME}-phrasebook-%s')
    import os
    if os.path.exists("%s%s" % (d.expand('${D}'), phrasebook_dir)):
        do_split_packages(d, phrasebook_dir, '^(.*)\.qph$', phrasebook_name, '${PN} phrasebook for %s', extra_depends='' )
    else:
        bb.note("The path does not exist: %s%s" % (d.expand('${D}'), phrasebook_dir))

    # Package all the plugins and their -dbg version and create a meta package
    def qtopia_split(path, name, glob):
        """
        Split the package into a normal and -dbg package and then add the
        new packages to the meta package.
        """
        plugin_dir = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/' % path)
        if not os.path.exists("%s%s" % (d.expand('${D}'), plugin_dir)):
            bb.note("The path does not exist: %s%s" % (d.expand('${D}'), plugin_dir))
            return

        plugin_name = d.expand('${QT_BASE_NAME}-plugin-%s-%%s' % name)
        dev_packages = []
        dev_hook = lambda file,pkg,b,c,d:dev_packages.append((file,pkg))
        do_split_packages(d, plugin_dir, glob, plugin_name, '${PN} %s for %%s' % name, extra_depends='', hook=dev_hook)
        # Create a -dbg package as well
        plugin_dir_dbg = d.expand('${libdir}/${QT_DIR_NAME}/plugins/%s/.debug' % path)
        packages = d.getVar('PACKAGES')
        for (file,package) in dev_packages:
            packages = "%s %s-dbg" % (packages, package)
            file_name = os.path.join(plugin_dir_dbg, os.path.basename(file))
            d.setVar("FILES_%s-dbg" % package, file_name)
            d.setVar("DESCRIPTION_%s-dbg" % package, "${PN} %s for %s" % (name, package))

        d.setVar('PACKAGES', packages)

    qtopia_split('accessible', 'accessible', '^libq(.*)\.so$')
    qtopia_split('codecs', 'codec', '^libq(.*)\.so$')
    qtopia_split('decorations', 'decoration', '^libqdecoration(.*)\.so$')
    qtopia_split('designer', 'designer', '^lib(.*)\.so$')
    qtopia_split('gfxdrivers', 'gfxdriver', '^libq(.*)\.so$')
    qtopia_split('graphicssystems','graphicssystems', '^libq(.*)\.so$')
    qtopia_split('mousedrivers', 'mousedriver', '^libq(.*)mousedriver\.so$')
    qtopia_split('iconengines', 'iconengine', '^libq(.*)\.so$')
    qtopia_split('imageformats', 'imageformat', '^libq(.*)\.so$')
    qtopia_split('inputmethods', 'inputmethod', '^libq(.*)\.so$')
    qtopia_split('sqldrivers', 'sqldriver', '^libq(.*)\.so$')
    qtopia_split('script', 'script', '^libqtscript(.*)\.so$')
    qtopia_split('styles', 'style', '^libq(.*)\.so$')
    qtopia_split('phonon_backend','phonon-backend','^libphonon_(.*)\.so$')
    qtopia_split('bearer', 'bearer', '^libq(.*)bearer\.so$')
}

do_install() {


@@ -17,7 +17,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
S = "${WORKDIR}/git"

do_unpack_append () {
    bb.build.exec_func('do_remove_patches', d)
}

do_remove_patches () {


@@ -46,8 +46,8 @@ do_compile () {
}

python populate_packages_prepend () {
    pcre_libdir = d.expand('${libdir}')
    do_split_packages(d, pcre_libdir, '^lib(.*)\.so\.+', 'lib%s', 'libpcre %s library', extra_depends='', allow_links=True, prepend=True)
}

BBCLASSEXTEND = "native"