Initial population

git-svn-id: https://svn.o-hand.com/repos/poky@1 311d38ba-8fff-0310-9ca6-ca027cbcb966
This commit is contained in:
Richard Purdie 2005-08-31 10:45:47 +00:00
commit 4b46c1f6e8
1032 changed files with 232488 additions and 0 deletions

View File

@ -0,0 +1,153 @@
inherit base
def autotools_dep_prepend(d):
	# Compute the implicit build dependencies every autotools recipe
	# needs.  Returns '' when the recipe opts out via
	# INHIBIT_AUTOTOOLS_DEPS, or when it would depend on itself
	# (autoconf/automake themselves).
	import bb
	if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
		return ''
	pn = bb.data.getVar('PN', d, 1)
	if pn in ['autoconf-native', 'automake-native']:
		return ''
	wanted = 'autoconf-native automake-native '
	# libtool recipes must not depend on libtool-native (circular).
	if pn not in ['libtool', 'libtool-native', 'libtool-cross']:
		wanted = wanted + 'libtool-native '
	return wanted + 'gnu-config-native '
EXTRA_OEMAKE = ""
DEPENDS_prepend = "${@autotools_dep_prepend(d)}"
acpaths = "default"
EXTRA_AUTORECONF = "--exclude=autopoint"

def autotools_set_crosscompiling(d):
	# Tell configure we are cross compiling, except for native builds
	# where host == build.
	import bb
	if bb.data.inherits_class('native', d):
		return ""
	return " cross_compiling=yes"

# EXTRA_OECONF_append = "${@autotools_set_crosscompiling(d)}"
oe_runconf () {
	# Run the source tree's configure script with the standard cross
	# directory layout; EXTRA_OECONF and any extra arguments ("$@")
	# are appended last so they can override the defaults.
	if [ -x ${S}/configure ] ; then
		cfgcmd="${S}/configure \
		--build=${BUILD_SYS} \
		--host=${HOST_SYS} \
		--target=${TARGET_SYS} \
		--prefix=${prefix} \
		--exec_prefix=${exec_prefix} \
		--bindir=${bindir} \
		--sbindir=${sbindir} \
		--libexecdir=${libexecdir} \
		--datadir=${datadir} \
		--sysconfdir=${sysconfdir} \
		--sharedstatedir=${sharedstatedir} \
		--localstatedir=${localstatedir} \
		--libdir=${libdir} \
		--includedir=${includedir} \
		--oldincludedir=${oldincludedir} \
		--infodir=${infodir} \
		--mandir=${mandir} \
		${EXTRA_OECONF} \
		$@"
		oenote "Running $cfgcmd..."
		$cfgcmd || oefatal "oe_runconf failed"
	else
		oefatal "no configure script found"
	fi
}
autotools_do_configure() {
	# Regenerate the autotools machinery with autoreconf (using the
	# staged aclocal macros), then run oe_runconf.  autoconf/automake
	# themselves must configure with their shipped scripts, so they
	# skip the regeneration step.
	case ${PN} in
	autoconf*)
		;;
	automake*)
		;;
	*)
		# WARNING: gross hack follows:
		# An autotools built package generally needs these scripts, however only
		# automake or libtoolize actually install the current versions of them.
		# This is a problem in builds that do not use libtool or automake, in the case
		# where we -need- the latest version of these scripts.  e.g. running a build
		# for a package whose autotools are old, on an x86_64 machine, which the old
		# config.sub does not support.  Work around this by installing them manually
		# regardless.
		( for ac in `find ${S} -name configure.in -o -name configure.ac`; do
			rm -f `dirname $ac`/configure
		done )
		if [ -e ${S}/configure.in -o -e ${S}/configure.ac ]; then
			olddir=`pwd`
			cd ${S}
			# acpaths == "default" means: scan the source tree for *.m4
			# directories; a recipe may set acpaths explicitly instead.
			if [ x"${acpaths}" = xdefault ]; then
				acpaths=
				for i in `find ${S} -maxdepth 2 -name \*.m4|grep -v 'aclocal.m4'| \
					grep -v 'acinclude.m4' | sed -e 's,\(.*/\).*$,\1,'|sort -u`; do
					acpaths="$acpaths -I $i"
				done
			else
				acpaths="${acpaths}"
			fi
			# AUTOV is automake's major.minor version, used to locate the
			# versioned aclocal directory in staging.
			AUTOV=`automake --version |head -n 1 |sed "s/.* //;s/\.[0-9]\+$//"`
			automake --version
			echo "AUTOV is $AUTOV"
			install -d ${STAGING_DIR}/${HOST_SYS}/share/aclocal
			install -d ${STAGING_DIR}/${HOST_SYS}/share/aclocal-$AUTOV
			acpaths="$acpaths -I ${STAGING_DIR}/${HOST_SYS}/share/aclocal-$AUTOV -I ${STAGING_DIR}/${HOST_SYS}/share/aclocal"
			# autoreconf is too shy to overwrite aclocal.m4 if it doesn't look
			# like it was auto-generated.  Work around this by blowing it away
			# by hand, unless the package specifically asked not to run aclocal.
			if ! echo ${EXTRA_AUTORECONF} | grep -q "aclocal"; then
				rm -f aclocal.m4
			fi
			if [ -e configure.in ]; then
				CONFIGURE_AC=configure.in
			else
				CONFIGURE_AC=configure.ac
			fi
			if grep "^AM_GLIB_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then
				if grep "sed.*POTFILES" $CONFIGURE_AC >/dev/null; then
					: do nothing -- we still have an old unmodified configure.ac
				else
					oenote Executing glib-gettextize --force --copy
					echo "no" | glib-gettextize --force --copy
				fi
			fi
			if grep "^AC_PROG_INTLTOOL" $CONFIGURE_AC >/dev/null; then
				oenote Executing intltoolize --copy --force --automake
				intltoolize --copy --force --automake
			fi
			oenote Executing autoreconf --verbose --install --force ${EXTRA_AUTORECONF} $acpaths
			mkdir -p m4
			autoreconf -Wcross --verbose --install --force ${EXTRA_AUTORECONF} $acpaths || oefatal "autoreconf execution failed."
			cd $olddir
		fi
		;;
	esac
	if [ -e ${S}/configure ]; then
		oe_runconf
	else
		oenote "nothing to configure"
	fi
}
autotools_do_install() {
	# Standard autotools install into the packaging root ${D}.
	oe_runmake 'DESTDIR=${D}' install
}

STAGE_TEMP="${WORKDIR}/temp-staging"

autotools_stage_includes() {
	# Populate STAGING_INCDIR with the package's headers by doing a
	# throwaway DESTDIR install and copying the include tree across.
	if [ "${INHIBIT_AUTO_STAGE_INCLUDES}" != "1" ]
	then
		rm -rf ${STAGE_TEMP}
		mkdir -p ${STAGE_TEMP}
		make DESTDIR="${STAGE_TEMP}" install
		cp -a ${STAGE_TEMP}/${includedir}/* ${STAGING_INCDIR}
		rm -rf ${STAGE_TEMP}
	fi
}

EXPORT_FUNCTIONS do_configure do_install

View File

@ -0,0 +1,801 @@
PATCHES_DIR="${S}"

def base_dep_prepend(d):
	# Compute the implicit DEPENDS every recipe gets: the patch-tool
	# provider (unless the recipe provides it itself) and, when cross
	# compiling, the cross toolchain and libc.
	import bb;
	#
	# Ideally this will check a flag so we will operate properly in
	# the case where host == build == target, for now we don't work in
	# that case though.
	#
	deps = ""

	# INHIBIT_DEFAULT_DEPS doesn't apply to the patch command.  Whether or not
	# we need that built is the responsibility of the patch function / class, not
	# the application.
	patchdeps = bb.data.getVar("PATCH_DEPENDS", d, 1)
	if patchdeps and not patchdeps in bb.data.getVar("PROVIDES", d, 1):
		deps = patchdeps

	if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
		if (bb.data.getVar('HOST_SYS', d, 1) !=
		    bb.data.getVar('BUILD_SYS', d, 1)):
			deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
	return deps
def base_read_file(filename):
import bb
try:
f = file( filename, "r" )
except IOError, reason:
raise bb.build.FuncFailed("can't read from file '%s' (%s)", (filename,reason))
else:
return f.read().strip()
return None
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
	# Ternary-style expansion helper: yield *truevalue* when the current
	# value of *variable* equals *checkvalue*, otherwise *falsevalue*.
	import bb
	if bb.data.getVar(variable, d, 1) != checkvalue:
		return falsevalue
	return truevalue

DEPENDS_prepend="${@base_dep_prepend(d)} "
def base_set_filespath(path, d):
	# Set FILESPATH to every entry of *path* crossed with each OVERRIDES
	# element; the appended ":" yields a trailing empty override so the
	# bare directory itself is searched last for each entry.
	import os, bb
	filespath = []
	# Hoisted out of the loops: OVERRIDES does not change per iteration.
	overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
	for p in path:
		for o in overrides.split(":"):
			filespath.append(os.path.join(p, o))
	bb.data.setVar("FILESPATH", ":".join(filespath), d)

FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
def oe_filter(f, str, d):
	# Keep only the whitespace-separated words of *str* that match the
	# regex *f* (anchored at the start of each word).  *d* is accepted
	# for call-site uniformity but unused.
	from re import match
	kept = [word for word in str.split() if match(f, word, 0)]
	return " ".join(kept)
def oe_filter_out(f, str, d):
	# Complement of oe_filter: drop the whitespace-separated words of
	# *str* matching regex *f*, keeping the rest.  *d* is unused.
	from re import match
	survivors = [word for word in str.split() if not match(f, word, 0)]
	return " ".join(survivors)
# Abort the build with a fatal error message.
die() {
	oefatal "$*"
}

oenote() {
	echo "NOTE:" "$*"
}

oewarn() {
	echo "WARNING:" "$*"
}

oefatal() {
	echo "FATAL:" "$*"
	exit 1
}

# oedebug <level> <message...>: print only when OEDEBUG >= level.
oedebug() {
	test $# -ge 2 || {
		echo "Usage: oedebug level \"message\""
		exit 1
	}

	test ${OEDEBUG:-0} -ge $1 && {
		shift
		echo "DEBUG:" $*
	}
}

# Wrapper around make honouring $MAKE and EXTRA_OEMAKE; dies on failure.
oe_runmake() {
	if [ x"$MAKE" = x ]; then MAKE=make; fi
	oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}
oe_soinstall() {
	# Purpose: Install shared library file and
	#          create the necessary links
	# Example:
	#
	#   oe_soinstall libfoo.so.1.2.3 ${D}${libdir}
	#
	#oenote installing shared library $1 to $2
	#
	libname=`basename $1`
	install -m 755 $1 $2/$libname
	# The first symlink name comes from the library's ELF SONAME, the
	# second is the plain .so development link.
	sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
	solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
	ln -sf $libname $2/$sonamelink
	ln -sf $libname $2/$solink
}
oe_libinstall() {
	# Purpose: Install a library, in all its forms
	# Example
	#
	# oe_libinstall libltdl ${STAGING_LIBDIR}/
	# oe_libinstall -C src/libblah libblah ${D}/${libdir}/
	#
	# Options:
	#   -C dir  change into dir before looking for the library
	#   -s      silent (do not echo the commands being run)
	#   -a      require the static archive (.a) to be installed
	#   -so     require a shared library to be found
	dir=""
	libtool=""
	silent=""
	require_static=""
	require_shared=""
	while [ "$#" -gt 0 ]; do
		case "$1" in
		-C)
			shift
			dir="$1"
			;;
		-s)
			silent=1
			;;
		-a)
			require_static=1
			;;
		-so)
			require_shared=1
			;;
		-*)
			oefatal "oe_libinstall: unknown option: $1"
			;;
		*)
			break;
			;;
		esac
		shift
	done

	libname="$1"
	shift
	destpath="$1"
	if [ -z "$destpath" ]; then
		oefatal "oe_libinstall: no destination path specified"
	fi

	# Echo-and-run helper, silenced by -s.
	__runcmd () {
		if [ -z "$silent" ]; then
			echo >&2 "oe_libinstall: $*"
		fi
		$*
	}

	if [ -z "$dir" ]; then
		dir=`pwd`
	fi
	# Auto-descend into a libtool .libs directory when present.
	if [ -d "$dir/.libs" ]; then
		dir=$dir/.libs
	fi
	olddir=`pwd`
	__runcmd cd $dir

	lafile=$libname.la
	if [ -f "$lafile" ]; then
		# libtool archive
		eval `cat $lafile|grep "^library_names="`
		libtool=1
	else
		library_names="$libname.so* $libname.dll.a"
	fi

	__runcmd install -d $destpath/
	dota=$libname.a
	if [ -f "$dota" -o -n "$require_static" ]; then
		__runcmd install -m 0644 $dota $destpath/
	fi
	dotlai=$libname.lai
	if [ -f "$dotlai" -a -n "$libtool" ]; then
		__runcmd install -m 0644 $dotlai $destpath/$libname.la
	fi

	for name in $library_names; do
		files=`eval echo $name`
		for f in $files; do
			if [ ! -e "$f" ]; then
				if [ -n "$libtool" ]; then
					oefatal "oe_libinstall: $dir/$f not found."
				fi
			elif [ -L "$f" ]; then
				# Preserve symlinks as symlinks.
				__runcmd cp -P "$f" $destpath/
			elif [ ! -L "$f" ]; then
				libfile="$f"
				__runcmd install -m 0755 $libfile $destpath/
			fi
		done
	done

	if [ -z "$libfile" ]; then
		if [ -n "$require_shared" ]; then
			oefatal "oe_libinstall: unable to locate shared library"
		fi
	elif [ -z "$libtool" ]; then
		# special case hack for non-libtool .so.#.#.# links
		baselibfile=`basename "$libfile"`
		if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
			sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
			solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
			if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
				__runcmd ln -sf $baselibfile $destpath/$sonamelink
			fi
			__runcmd ln -sf $baselibfile $destpath/$solink
		fi
	fi

	__runcmd cd "$olddir"
}
oe_machinstall() {
	# Purpose: Install machine dependent files, if available
	#          If not available, check if there is a default
	#          If no default, just touch the destination
	# Example:
	#                $1  $2   $3         $4
	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
	#
	# TODO: Check argument number?
	#
	filename=`basename $3`
	dirname=`dirname $3`

	# Try each OVERRIDES element as a subdirectory, most specific first.
	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
		if [ -e $dirname/$o/$filename ]; then
			oenote $dirname/$o/$filename present, installing to $4
			install $1 $2 $dirname/$o/$filename $4
			return
		fi
	done
#	oenote overrides specific file NOT present, trying default=$3...
	if [ -e $3 ]; then
		oenote $3 present, installing to $4
		install $1 $2 $3 $4
	else
		oenote $3 NOT present, touching empty $4
		touch $4
	fi
}
addtask showdata
do_showdata[nostamp] = "1"
# Debug task: dump the full datastore (variables, shell and python
# functions) to stdout.
python do_showdata() {
	import sys
	# emit variables and shell functions
	bb.data.emit_env(sys.__stdout__, d, True)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'python', d):
			sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
}

addtask listtasks
do_listtasks[nostamp] = "1"
# Debug task: print the name of every variable flagged as a task.
python do_listtasks() {
	import sys
	# emit variables and shell functions
	#bb.data.emit_env(sys.__stdout__, d)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'task', d):
			sys.__stdout__.write("%s\n" % e)
}
addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
do_clean[bbdepcmd] = ""
python base_do_clean() {
	"""clear the build and temp directories"""
	dir = bb.data.expand("${WORKDIR}", d)
	if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + dir)
	# NOTE(review): paths are interpolated into the shell command
	# unquoted; a WORKDIR containing spaces or shell metacharacters
	# would misbehave here.
	os.system('rm -rf ' + dir)

	# Also remove all stamp files for this recipe.
	dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + dir)
	os.system('rm -f '+ dir)
}
addtask mrproper
do_mrproper[dirs] = "${TOPDIR}"
do_mrproper[nostamp] = "1"
do_mrproper[bbdepcmd] = ""
python base_do_mrproper() {
	"""clear downloaded sources, build and temp directories"""
	dir = bb.data.expand("${DL_DIR}", d)
	# Bug fix: the FuncFailed was constructed but never raised, so a
	# DL_DIR expanding to '/' sailed straight into the rm -rf below.
	if dir == '/':
		raise bb.build.FuncFailed("wrong DATADIR")
	bb.debug(2, "removing " + dir)
	os.system('rm -rf ' + dir)
	bb.build.exec_task('do_clean', d)
}
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[nostamp] = "1"
python base_do_fetch() {
	# Download every SRC_URI entry into DL_DIR, translating the
	# bb.fetch exceptions into build failures.
	import sys

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata, 1)
	if not src_uri:
		return 1

	try:
		bb.fetch.init(src_uri.split(),d)
	except bb.fetch.NoMethodError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("No method: %s" % value)

	try:
		bb.fetch.go(localdata)
	except bb.fetch.MissingParameterError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Missing parameters: %s" % value)
	except bb.fetch.FetchError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Fetch failed: %s" % value)
}
def oe_unpack_file(file, data, url = None):
	# Unpack a single fetched file into the current directory (WORKDIR).
	# Archive types are dispatched on the file extension; plain files and
	# directories are copied, preserving any FILESPATH-relative subdir.
	# Returns True on success (or when there is nothing to do).
	import bb, os
	if not url:
		url = "file://%s" % file
	dots = file.split(".")
	if dots[-1] in ['gz', 'bz2', 'Z']:
		# efile is the decompressed name inside WORKDIR.
		efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
	else:
		efile = file
	cmd = None
	if file.endswith('.tar'):
		cmd = 'tar x --no-same-owner -f %s' % file
	elif file.endswith('.tgz') or file.endswith('.tar.gz'):
		cmd = 'tar xz --no-same-owner -f %s' % file
	elif file.endswith('.tbz') or file.endswith('.tar.bz2'):
		cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
	elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
		cmd = 'gzip -dc %s > %s' % (file, efile)
	elif file.endswith('.bz2'):
		cmd = 'bzip2 -dc %s > %s' % (file, efile)
	elif file.endswith('.zip'):
		cmd = 'unzip -q %s' % file
	elif os.path.isdir(file):
		# A directory: recreate it under WORKDIR, preserving its path
		# relative to FILESDIR when it lives inside FILESDIR.
		filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
		destdir = "."
		if file[0:len(filesdir)] == filesdir:
			destdir = file[len(filesdir):file.rfind('/')]
			destdir = destdir.strip('/')
			if len(destdir) < 1:
				destdir = "."
			elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
				os.makedirs("%s/%s" % (os.getcwd(), destdir))
		cmd = 'cp -a %s %s/%s/' % (file, os.getcwd(), destdir)
	else:
		# Plain file; patches are applied elsewhere, so skip them here.
		(type, host, path, user, pswd, parm) = bb.decodeurl(url)
		if not 'patch' in parm:
			# The "destdir" handling was specifically done for FILESPATH
			# items.  So, only do so for file:// entries.
			if type == "file":
				destdir = bb.decodeurl(url)[1] or "."
			else:
				destdir = "."
			bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
			cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
	if not cmd:
		return True
	cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
	bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
	ret = os.system(cmd)
	return ret == 0
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
	# Unpack every SRC_URI entry into WORKDIR via oe_unpack_file,
	# failing the task if any extraction fails.
	import re, os

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata)
	if not src_uri:
		return
	src_uri = bb.data.expand(src_uri, localdata)
	for url in src_uri.split():
		try:
			local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
		except bb.MalformedUrl, e:
			# Bug fix: FuncFailed was referenced unqualified here, which
			# would itself die with a NameError; use the real class.
			raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
		# dont need any parameters for extraction, strip them off
		local = re.sub(';.*$', '', local)
		local = os.path.realpath(local)
		ret = oe_unpack_file(local, localdata, url)
		if not ret:
			raise bb.build.FuncFailed()
}
addtask patch after do_unpack
do_patch[dirs] = "${WORKDIR}"
python base_do_patch() {
	# Apply every SRC_URI entry carrying a ;patch=... parameter via the
	# configured PATCHCMD, optionally running PATCHCLEANCMD first.
	import re
	import bb.fetch

	src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
	if not src_uri:
		return

	patchcleancmd = bb.data.getVar('PATCHCLEANCMD', d, 1)
	if patchcleancmd:
		bb.data.setVar("do_patchcleancmd", patchcleancmd, d)
		bb.data.setVarFlag("do_patchcleancmd", "func", 1, d)
		bb.build.exec_func("do_patchcleancmd", d)

	workdir = bb.data.getVar('WORKDIR', d, 1)
	for url in src_uri:
		(type, host, path, user, pswd, parm) = bb.decodeurl(url)
		if not "patch" in parm:
			continue

		bb.fetch.init([url], d)
		url = bb.encodeurl((type, host, path, user, pswd, []))
		local = os.path.join('/', bb.fetch.localpath(url, d))

		# did it need to be unpacked?
		dots = os.path.basename(local).split(".")
		if dots[-1] in ['gz', 'bz2', 'Z']:
			unpacked = os.path.join(bb.data.getVar('WORKDIR', d),'.'.join(dots[0:-1]))
		else:
			unpacked = local
		unpacked = bb.data.expand(unpacked, d)

		# pnum is the -p strip level, pname the label used in messages.
		if "pnum" in parm:
			pnum = parm["pnum"]
		else:
			pnum = "1"

		if "pname" in parm:
			pname = parm["pname"]
		else:
			pname = os.path.basename(unpacked)

		bb.note("Applying patch '%s'" % pname)
		bb.data.setVar("do_patchcmd", bb.data.getVar("PATCHCMD", d, 1) % (pnum, pname, unpacked), d)
		bb.data.setVarFlag("do_patchcmd", "func", 1, d)
		bb.data.setVarFlag("do_patchcmd", "dirs", "${WORKDIR} ${S}", d)
		bb.build.exec_func("do_patchcmd", d)
}
addhandler base_eventhandler
python base_eventhandler() {
	# Global event handler: turns build/task/package events into log
	# notes, prints the build configuration banner on BuildStarted,
	# sanity-checks required variables, and appends to EVENTLOG if set.
	from bb import note, error, data
	from bb.event import Handled, NotHandled, getName
	import os

	messages = {}
	messages["Completed"] = "completed"
	messages["Succeeded"] = "completed"
	messages["Started"] = "started"
	messages["Failed"] = "failed"

	name = getName(e)
	msg = ""
	if name.startswith("Pkg"):
		msg += "package %s: " % data.getVar("P", e.data, 1)
		msg += messages.get(name[3:]) or name[3:]
	elif name.startswith("Task"):
		msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
		msg += messages.get(name[4:]) or name[4:]
	elif name.startswith("Build"):
		msg += "build %s: " % e.name
		msg += messages.get(name[5:]) or name[5:]
	elif name == "UnsatisfiedDep":
		msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
	note(msg)

	if name.startswith("BuildStarted"):
		statusvars = ['TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO',
		'TARGET_FPU']
		statuslines = ["%-13s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
		statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
		print statusmsg

		needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
		pesteruser = []
		for v in needed_vars:
			val = bb.data.getVar(v, e.data, 1)
			if not val or val == 'INVALID':
				pesteruser.append(v)
		if pesteruser:
			bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

	# NOTE(review): this tests the imported 'data' module object against
	# e.__dict__, which looks odd — presumably meant to check that the
	# event carries a datastore; confirm intent.
	if not data in e.__dict__:
		return NotHandled

	log = data.getVar("EVENTLOG", e.data, 1)
	if log:
		logfile = file(log, "a")
		logfile.write("%s\n" % msg)
		logfile.close()

	return NotHandled
}
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[bbdepcmd] = "do_populate_staging"
# Default configure: nothing to do; classes override this.
base_do_configure() {
	:
}

addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
do_compile[bbdepcmd] = "do_populate_staging"
# Default compile: run make when a Makefile exists.
base_do_compile() {
	if [ -e Makefile -o -e makefile ]; then
		oe_runmake || die "make failed"
	else
		oenote "nothing to compile"
	fi
}

addtask stage after do_compile
base_do_stage () {
	:
}

do_populate_staging[dirs] = "${STAGING_DIR}/${TARGET_SYS}/bin ${STAGING_DIR}/${TARGET_SYS}/lib \
			     ${STAGING_DIR}/${TARGET_SYS}/include \
			     ${STAGING_DIR}/${BUILD_SYS}/bin ${STAGING_DIR}/${BUILD_SYS}/lib \
			     ${STAGING_DIR}/${BUILD_SYS}/include \
			     ${STAGING_DATADIR} \
			     ${S} ${B}"

addtask populate_staging after do_compile

#python do_populate_staging () {
#	if not bb.data.getVar('manifest', d):
#		bb.build.exec_func('do_emit_manifest', d)
#	if bb.data.getVar('do_stage', d):
#		bb.build.exec_func('do_stage', d)
#	else:
#		bb.build.exec_func('manifest_do_populate_staging', d)
#}

# Prefer a manifest-generated staging function when one exists,
# otherwise fall back to the recipe's do_stage.
python do_populate_staging () {
	if bb.data.getVar('manifest_do_populate_staging', d):
		bb.build.exec_func('manifest_do_populate_staging', d)
	else:
		bb.build.exec_func('do_stage', d)
}

#addtask install
addtask install after do_compile
do_install[dirs] = "${S} ${B}"

base_do_install() {
	:
}

#addtask populate_pkgs after do_compile
#python do_populate_pkgs () {
#	if not bb.data.getVar('manifest', d):
#		bb.build.exec_func('do_emit_manifest', d)
#	bb.build.exec_func('manifest_do_populate_pkgs', d)
#	bb.build.exec_func('package_do_shlibs', d)
#}

base_do_package() {
	:
}

addtask build after do_populate_staging
do_build = ""
do_build[func] = "1"
# Functions that update metadata based on files outputted
# during the build process.

SHLIBS = ""
RDEPENDS_prepend = " ${SHLIBS}"

# Load the package manifest (if present) into the 'manifest' variable.
python read_manifest () {
	import sys
	mfn = bb.data.getVar("MANIFEST", d, 1)
	if os.access(mfn, os.R_OK):
		# we have a manifest, so emit do_stage and do_populate_pkgs,
		# and stuff some additional bits of data into the metadata store
		mfile = file(mfn, "r")
		manifest = bb.manifest.parse(mfile, d)
		if not manifest:
			return

		bb.data.setVar('manifest', manifest, d)
}

# Turn the parsed manifest into executable manifest_do_* functions and
# derive PACKAGES from the manifest's pkg entries.
python parse_manifest () {
	manifest = bb.data.getVar("manifest", d)
	if not manifest:
		return
	for func in ("do_populate_staging", "do_populate_pkgs"):
		value = bb.manifest.emit(func, manifest, d)
		if value:
			bb.data.setVar("manifest_" + func, value, d)
			bb.data.delVarFlag("manifest_" + func, "python", d)
			bb.data.delVarFlag("manifest_" + func, "fakeroot", d)
			bb.data.setVarFlag("manifest_" + func, "func", 1, d)
	packages = []
	for l in manifest:
		if "pkg" in l and l["pkg"] is not None:
			packages.append(l["pkg"])
	bb.data.setVar("PACKAGES", " ".join(packages), d)
}
def explode_deps(s):
	# Split a dependency string into a list, keeping a parenthesised
	# version constraint attached to the item that precedes it, e.g.
	# "a (>= 1.0) b" -> ["a (>= 1.0)", "b"].
	result = []
	pending = None  # tokens of an open "(...)" group, or None
	for token in s.split():
		if token.startswith('('):
			pending = []
		if pending is None:
			result.append(token)
		else:
			pending.append(token)
			if token.endswith(')'):
				result[-1] += ' ' + ' '.join(pending)
				pending = None
	return result
# Fold the per-package .shlibdeps and .pcdeps files (written during
# packaging) into each package's RDEPENDS.
python read_shlibdeps () {
	packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
	for pkg in packages:
		rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 0) or bb.data.getVar('RDEPENDS', d, 0) or "")
		shlibsfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".shlibdeps", d)
		if os.access(shlibsfile, os.R_OK):
			fd = file(shlibsfile)
			lines = fd.readlines()
			fd.close()
			for l in lines:
				rdepends.append(l.rstrip())
		pcfile = bb.data.expand("${WORKDIR}/install/" + pkg + ".pcdeps", d)
		if os.access(pcfile, os.R_OK):
			fd = file(pcfile)
			lines = fd.readlines()
			fd.close()
			for l in lines:
				rdepends.append(l.rstrip())
		bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)
}

# Re-import "key: value" metadata saved by packaging into the datastore,
# undoing the string-escape encoding applied when it was written.
python read_subpackage_metadata () {
	import re

	def decode(str):
		import codecs
		c = codecs.getdecoder("string_escape")
		return c(str)[0]

	data_file = bb.data.expand("${WORKDIR}/install/${PN}.package", d)
	if os.access(data_file, os.R_OK):
		f = file(data_file, 'r')
		lines = f.readlines()
		f.close()
		r = re.compile("([^:]+):\s*(.*)")
		for l in lines:
			m = r.match(l)
			if m:
				bb.data.setVar(m.group(1), decode(m.group(2)), d)
}
# Parse-time hook: skip recipes incompatible with the host, honour
# per-recipe CVSDATE/USE_NLS overrides, and load any saved manifest.
python __anonymous () {
	import exceptions
	need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
	if need_host:
		import re
		this_host = bb.data.getVar('HOST_SYS', d, 1)
		if not re.match(need_host, this_host):
			raise bb.parse.SkipPackage("incompatible with host %s" % this_host)

	pn = bb.data.getVar('PN', d, 1)

	cvsdate = bb.data.getVar('CVSDATE_%s' % pn, d, 1)
	if cvsdate != None:
		bb.data.setVar('CVSDATE', cvsdate, d)

	use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
	if use_nls != None:
		bb.data.setVar('USE_NLS', use_nls, d)

	try:
		bb.build.exec_func('read_manifest', d)
		bb.build.exec_func('parse_manifest', d)
	except exceptions.KeyboardInterrupt:
		raise
	except Exception, e:
		# Manifest loading is best-effort at parse time; report but
		# do not abort the parse.
		bb.error("anonymous function: %s" % e)
		pass
}

# Parse-time hook: if any SRC_URI entry comes from a machine-specific
# FILESPATH subdirectory, mark the package as machine-specific.
python () {
	import bb, os
	mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
	old_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
	if (old_arch == mach_arch):
		# Nothing to do
		return
	if (bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1) == '0'):
		return
	paths = []
	for p in [ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ]:
		paths.append(bb.data.expand(os.path.join(p, mach_arch), d))
	for s in bb.data.getVar('SRC_URI', d, 1).split():
		local = bb.data.expand(bb.fetch.localpath(s, d), d)
		for mp in paths:
			if local.startswith(mp):
#				bb.note("overriding PACKAGE_ARCH from %s to %s" % (old_arch, mach_arch))
				bb.data.setVar('PACKAGE_ARCH', mach_arch, d)
				return
}
addtask emit_manifest
# Experimental: run do_install through wrapper scripts that record what
# gets installed, producing a manifest for later staging/packaging.
python do_emit_manifest () {
	# FIXME: emit a manifest here
	# 1) adjust PATH to hit the wrapper scripts
	wrappers = bb.which(bb.data.getVar("BBPATH", d, 1), 'build/install', 0)
	path = (bb.data.getVar('PATH', d, 1) or '').split(':')
	path.insert(0, os.path.dirname(wrappers))
	bb.data.setVar('PATH', ':'.join(path), d)
	# 2) exec_func("do_install", d)
	bb.build.exec_func('do_install', d)
	# 3) read in data collected by the wrappers
	bb.build.exec_func('read_manifest', d)
	# 4) mangle the manifest we just generated, get paths back into
	#    our variable form
	# 5) write it back out
	# 6) re-parse it to ensure the generated functions are proper
	bb.build.exec_func('parse_manifest', d)
}

EXPORT_FUNCTIONS do_clean do_mrproper do_fetch do_unpack do_configure do_compile do_install do_package do_patch do_populate_pkgs do_stage

MIRRORS[func] = "0"
# Mirror table: pairs of "<original URI prefix> <replacement prefix>"
# tried in order when fetching from the primary URI fails.
MIRRORS () {
${DEBIAN_MIRROR}/main	http://snapshot.debian.net/archive/pool
${DEBIAN_MIRROR}	ftp://ftp.de.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.au.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.cl.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hr.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.fi.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hk.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.hu.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.ie.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.it.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.jp.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.no.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.pl.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.ro.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.si.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.es.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.se.debian.org/debian/pool
${DEBIAN_MIRROR}	ftp://ftp.tr.debian.org/debian/pool
${GNU_MIRROR}	ftp://mirrors.kernel.org/gnu
${GNU_MIRROR}	ftp://ftp.matrix.com.br/pub/gnu
${GNU_MIRROR}	ftp://ftp.cs.ubc.ca/mirror2/gnu
${GNU_MIRROR}	ftp://sunsite.ust.hk/pub/gnu
${GNU_MIRROR}	ftp://ftp.ayamura.org/pub/gnu
ftp://ftp.kernel.org/pub	http://www.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.us.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.uk.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.hk.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.au.kernel.org/pub
ftp://ftp.kernel.org/pub	ftp://ftp.jp.kernel.org/pub
ftp://.*/.*/	http://www.oesources.org/source/current/
http://.*/.*/	http://www.oesources.org/source/current/
}

View File

@ -0,0 +1,20 @@
inherit base package rpm_core

SPECFILE="${RPMBUILDPATH}/SPECS/${PN}.spec"

base_srpm_do_unpack() {
	# Install the source rpm and run its %prep stage (rpmbuild -bp).
	# Bug fix: the first die message ran the filename and "does not
	# exist" together with no separating space.
	test -e ${SRPMFILE} || die "Source rpm \"${SRPMFILE}\" does not exist"
	if ! test -e ${SPECFILE}; then
		${RPM} -i ${SRPMFILE}
	fi
	test -e ${SPECFILE} || die "Spec file \"${SPECFILE}\" does not exist"
	${RPMBUILD} -bp ${SPECFILE}
}
base_srpm_do_compile() {
	# rpmbuild -bc: execute the spec's %build stage.
	${RPMBUILD} -bc ${SPECFILE}
}

base_srpm_do_install() {
	# rpmbuild -bi: execute the spec's %install stage.
	${RPMBUILD} -bi ${SPECFILE}
}

View File

@ -0,0 +1,34 @@
inherit base
# The namespaces can clash here hence the two step replace
def get_binconfig_mangle(d):
	# Build the sed expression string used to rewrite *-config scripts
	# so they report staging-area paths instead of target paths.  The
	# OE* placeholder two-step avoids the source and replacement
	# namespaces clashing with each other.
	import bb.data
	exprs = ["''"]
	if not bb.data.inherits_class('native', d):
		exprs.append("'s:=${libdir}:=OELIBDIR:;'")
		exprs.append("'s:=${includedir}:=OEINCDIR:;'")
		exprs.append("'s:=${datadir}:=OEDATADIR:'")
		exprs.append("'s:=${prefix}:=OEPREFIX:'")
		exprs.append("'s:=${exec_prefix}:=OEEXECPREFIX:'")
		exprs.append("'s:-L${libdir}:-LOELIBDIR:;'")
		exprs.append("'s:-I${includedir}:-IOEINCDIR:;'")
		exprs.append("'s:OELIBDIR:${STAGING_LIBDIR}:;'")
		exprs.append("'s:OEINCDIR:${STAGING_INCDIR}:;'")
		exprs.append("'s:OEDATADIR:${STAGING_DATADIR}:'")
		exprs.append("'s:OEPREFIX:${STAGING_LIBDIR}/..:'")
		exprs.append("'s:OEEXECPREFIX:${STAGING_LIBDIR}/..:'")
	return "-e " + " -e ".join(exprs)
# Native package configurations go in ${BINDIR}/<name>-config-native to prevent a collision with cross packages
def is_native(d):
	# Yield "-native" for native builds, the empty string otherwise.
	import bb.data
	if bb.data.inherits_class('native', d):
		return "-native"
	return ""
do_stage_append() {
	# Copy every *-config script into staging, rewriting its paths with
	# the sed program from get_binconfig_mangle so it reports staging
	# locations; native scripts get a -native suffix to avoid clashes.
	for config in `find ${S} -name '*-config'`; do
		configname=`basename $config`${@is_native(d)}
		install -d ${STAGING_BINDIR}
		cat $config | sed ${@get_binconfig_mangle(d)} > ${STAGING_BINDIR}/$configname
		chmod u+x ${STAGING_BINDIR}/$configname
	done
}

View File

@ -0,0 +1,11 @@
# Make ccache use a TMPDIR specific ccache directory if using the crosscompiler,
# since it isn't likely to be useful with any other toolchain than the one we just
# built, and would otherwise push more useful things out of the default cache.

CCACHE_DIR_TARGET = "${TMPDIR}/ccache"

# Parse-time hook: only target (non-native, non-cross) recipes get the
# TMPDIR-local cache, exported into the task environment.
python () {
	if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
		bb.data.setVar('CCACHE_DIR', '${CCACHE_DIR_TARGET}', d)
		bb.data.setVarFlag('CCACHE_DIR', 'export', '1', d)
}

View File

@ -0,0 +1,21 @@
# Parse-time hook: prepend the ccdv output-prettifier to the compiler
# variables and add a dependency on ccdv-native.
# NOTE(review): the outer condition reads `PN in ['ccdv-native']`, which
# would apply this only to ccdv-native itself (a self-dependency) — one
# would expect `not in` here; confirm against upstream history.
python () {
	if bb.data.getVar('PN', d, 1) in ['ccdv-native']:
		if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d, 1):
			bb.data.setVar("DEPENDS", '%s %s' % ("ccdv-native", bb.data.getVar("DEPENDS", d, 1) or ""), d)
			bb.data.setVar("CC", '%s %s' % ("ccdv", bb.data.getVar("CC", d, 1) or ""), d)
			bb.data.setVar("BUILD_CC", '%s %s' % ("ccdv", bb.data.getVar("BUILD_CC", d, 1) or ""), d)
			bb.data.setVar("CCLD", '%s %s' % ("ccdv", bb.data.getVar("CCLD", d, 1) or ""), d)
}
def quiet_libtool(bb,d):
	# Pick the silenced LIBTOOL override matching whichever libtool
	# variant this recipe depends on; empty when neither is a dependency.
	deps = (bb.data.getVar('DEPENDS', d, 1) or "").split()
	if 'libtool-cross' in deps:
		return "'LIBTOOL=${STAGING_BINDIR}/${HOST_SYS}-libtool --silent'"
	if 'libtool-native' in deps:
		return "'LIBTOOL=${B}/${HOST_SYS}-libtool --silent'"
	return ""
CCDV = "ccdv"
EXTRA_OEMAKE_append = " ${@quiet_libtool(bb,d)}"
# Silence make so ccdv's condensed output stays readable.
MAKE += "-s"

View File

@ -0,0 +1,8 @@
# Configure a CML1/kconfig-style package by refreshing its .config
# via "make oldconfig"; host compiler flags must not leak in.
cml1_do_configure() {
	set -e
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake oldconfig
}

EXPORT_FUNCTIONS do_configure
addtask configure after do_unpack do_patch before do_compile

View File

@ -0,0 +1,20 @@
FILES_${PN} += '${libdir}/perl5'

cpan_do_configure () {
	# Generate the Makefile, then for cross builds rewrite the install
	# paths using the staged target perl's config.sh values.
	perl Makefile.PL
	if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
		. ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
		sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
		mv Makefile.new Makefile
	fi
}

cpan_do_compile () {
	oe_runmake PASTHRU_INC="${CFLAGS}"
}

cpan_do_install () {
	oe_runmake install_vendor
}

EXPORT_FUNCTIONS do_configure do_compile do_install

View File

@ -0,0 +1,54 @@
# Cross packages are built indirectly via dependency,
# no need for them to be a direct target of 'world'
EXCLUDE_FROM_WORLD = "1"

PACKAGES = ""

# Cross tools run on the build machine, so compile with the build
# toolchain and flags.
HOST_ARCH = "${BUILD_ARCH}"
HOST_VENDOR = "${BUILD_VENDOR}"
HOST_OS = "${BUILD_OS}"
HOST_PREFIX = "${BUILD_PREFIX}"
HOST_CC_ARCH = "${BUILD_CC_ARCH}"

CPPFLAGS = "${BUILD_CPPFLAGS}"
CFLAGS = "${BUILD_CFLAGS}"
CXXFLAGS = "${BUILD_CFLAGS}"
LDFLAGS = "${BUILD_LDFLAGS}"

# Overrides for paths

# Path prefixes
base_prefix = "${exec_prefix}"
prefix = "${CROSS_DIR}"
exec_prefix = "${prefix}"

# Base paths
base_bindir = "${base_prefix}/bin"
base_sbindir = "${base_prefix}/bin"
base_libdir = "${base_prefix}/lib"

# Architecture independent paths
datadir = "${prefix}/share"
sysconfdir = "${prefix}/etc"
sharedstatedir = "${prefix}/com"
localstatedir = "${prefix}/var"
infodir = "${datadir}/info"
mandir = "${datadir}/man"
docdir = "${datadir}/doc"
servicedir = "${prefix}/srv"

# Architecture dependent paths
bindir = "${exec_prefix}/bin"
sbindir = "${exec_prefix}/bin"
libexecdir = "${exec_prefix}/libexec"
libdir = "${exec_prefix}/lib"
includedir = "${exec_prefix}/include"
oldincludedir = "${exec_prefix}/include"

# Cross tools install straight into CROSS_DIR at stage time; there is
# no target package to populate.
do_stage () {
	oe_runmake install
}

do_install () {
	:
}

View File

@ -0,0 +1,93 @@
# Rename library packages Debian-style: a package shipping exactly one
# shared library (and no binaries) is renamed after its SONAME, e.g.
# libfoo -> libfoo1, with its -dev style siblings renamed to match.
python debian_package_name_hook () {
	import glob, copy, stat, errno, re

	workdir = bb.data.getVar('WORKDIR', d, 1)
	packages = bb.data.getVar('PACKAGES', d, 1)

	# Turn a SONAME like "libfoo.so.1" into the Debian package name pair
	# (runtime name, development-name stem), or None if it doesn't parse.
	def socrunch(s):
		s = s.lower().replace('_', '-')
		m = re.match("^(.*)(.)\.so\.(.*)$", s)
		if m is None:
			return None
		if m.group(2) in '0123456789':
			bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
		else:
			bin = m.group(1) + m.group(2) + m.group(3)
		dev = m.group(1) + m.group(2)
		return (bin, dev)

	def isexec(path):
		try:
			s = os.stat(path)
		except (os.error, AttributeError):
			return 0
		return (s[stat.ST_MODE] & stat.S_IEXEC)

	# Inspect one package's install tree: collect the SONAMEs of the
	# shared libraries it ships and whether it also ships binaries.
	def auto_libname(packages, orig_pkg):
		bin_re = re.compile(".*/s?bin$")
		lib_re = re.compile(".*/lib$")
		so_re = re.compile("lib.*\.so")
		sonames = []
		has_bins = 0
		has_libs = 0
		pkg_dir = os.path.join(workdir, "install", orig_pkg)
		for root, dirs, files in os.walk(pkg_dir):
			if bin_re.match(root) and files:
				has_bins = 1
			if lib_re.match(root) and files:
				has_libs = 1
				for f in files:
					if so_re.match(f):
						fp = os.path.join(root, f)
						# Extract the SONAME from objdump -p output.
						cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + fp + " 2>/dev/null"
						fd = os.popen(cmd)
						lines = fd.readlines()
						fd.close()
						for l in lines:
							m = re.match("\s+SONAME\s+([^\s]*)", l)
							if m and not m.group(1) in sonames:
								sonames.append(m.group(1))

		bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
		soname = None
		if len(sonames) == 1:
			soname = sonames[0]
		elif len(sonames) > 1:
			# Several libraries: LEAD_SONAME (a regex) may pick one.
			lead = bb.data.getVar('LEAD_SONAME', d, 1)
			if lead:
				r = re.compile(lead)
				filtered = []
				for s in sonames:
					if r.match(s):
						filtered.append(s)
				if len(filtered) == 1:
					soname = filtered[0]
				elif len(filtered) > 1:
					bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
				else:
					bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
			else:
				bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))

		if has_libs and not has_bins and soname:
			soname_result = socrunch(soname)
			if soname_result:
				(pkgname, devname) = soname_result
				for pkg in packages.split():
					# An explicit PKG_<pkg> wins over the automatic name.
					if (bb.data.getVar('PKG_' + pkg, d)):
						continue
					if pkg == orig_pkg:
						newpkg = pkgname
					else:
						newpkg = pkg.replace(orig_pkg, devname, 1)
					if newpkg != pkg:
						bb.data.setVar('PKG_' + pkg, newpkg, d)

	for pkg in (bb.data.getVar('AUTO_LIBNAME_PKGS', d, 1) or "").split():
		auto_libname(packages, pkg)
}

EXPORT_FUNCTIONS package_name_hook

DEBIAN_NAMES = 1

View File

@ -0,0 +1,14 @@
EXTRA_OEMAKE = ""
# Target recipes (PACKAGES non-empty) need python at build and run time;
# native recipes (PACKAGES == '') get no extra DEPENDS.
DEPENDS += "${@["python-native python", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
RDEPENDS += "python-core"
def python_dir(d):
    """Return the versioned python directory name (e.g. "python2.3")
    found under STAGING_INCDIR, checking versions in the original
    preference order.

    Raises RuntimeError when no staged python headers exist, which
    usually means python-native has not been built yet.
    """
    import os, bb
    staging_incdir = bb.data.getVar( "STAGING_INCDIR", d, 1 )
    for ver in ("python2.3", "python2.4"):
        if os.path.exists( "%s/%s" % (staging_incdir, ver) ):
            return ver
    # The original raised a bare string; string exceptions are deprecated
    # and invalid in modern Python, so raise a real exception object.
    raise RuntimeError("No Python in STAGING_INCDIR. Forgot to build python-native ?")
# Versioned python subdirectory (e.g. "python2.3") detected from staging.
PYTHON_DIR = "${@python_dir(d)}"
FILES_${PN} = "${bindir} ${libdir} ${libdir}/${PYTHON_DIR}"

View File

@ -0,0 +1,15 @@
inherit distutils-base

# Run setup.py with the cross build/host tuples exported so distutils
# picks up the right configuration.
distutils_do_compile() {
	BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
	${STAGING_BINDIR}/python setup.py build || \
	oefatal "python setup.py build execution failed."
}

distutils_do_install() {
	BUILD_SYS=${BUILD_SYS} HOST_SYS=${HOST_SYS} \
	${STAGING_BINDIR}/python setup.py install --prefix=${D}/${prefix} --install-data=${D}/${datadir} || \
	oefatal "python setup.py install execution failed."
}

EXPORT_FUNCTIONS do_compile do_install

View File

@ -0,0 +1,76 @@
# Common metadata for Enlightenment Foundation Libraries (EFL) recipes.
MAINTAINER = "Justin Patrin <papercrane@reversefold.com>"
HOMEPAGE = "http://www.enlightenment.org"
SECTION = "e/libs"

# Native variants share the same source; strip -native for URI/dir names.
SRCNAME = "${@bb.data.getVar('PN', d, 1).replace('-native', '')}"
SRC_URI = "http://enlightenment.freedesktop.org/files/${SRCNAME}-${PV}.tar.gz"
S = "${WORKDIR}/${SRCNAME}-${PV}"

inherit autotools pkgconfig binconfig

# Best-effort cleanup of a previously-used source tree before unpacking.
do_prepsources () {
	make clean distclean || true
}
addtask prepsources after do_fetch before do_unpack

INHIBIT_AUTO_STAGE_INCLUDES = "1"
INHIBIT_NATIVE_STAGE_INSTALL = "1"

# Defaults for the staging loop below; recipes override when they differ.
libdirectory = "src/lib"
libraries = "lib${SRCNAME}"
headers = "${@bb.data.getVar('SRCNAME',d,1).capitalize()}.h"
def binconfig_suffix(d):
    """Return "-native" when the current recipe inherits the native
    class, otherwise an empty string; used to select the matching
    staged *-config script name."""
    import bb
    if bb.data.inherits_class('native', d):
        return "-native"
    return ""
# Point every FOO_CONFIG at the staged *-config script, adding the
# -native suffix when building a native variant (binconfig_suffix).
export CURL_CONFIG = "${STAGING_BINDIR}/curl-config${@binconfig_suffix(d)}"
export EDB_CONFIG = "${STAGING_BINDIR}/edb-config${@binconfig_suffix(d)}"
export EET_CONFIG = "${STAGING_BINDIR}/eet-config${@binconfig_suffix(d)}"
export EVAS_CONFIG = "${STAGING_BINDIR}/evas-config${@binconfig_suffix(d)}"
export ECORE_CONFIG = "${STAGING_BINDIR}/ecore-config${@binconfig_suffix(d)}"
export EMBRYO_CONFIG = "${STAGING_BINDIR}/embryo-config${@binconfig_suffix(d)}"
export ENGRAVE_CONFIG = "${STAGING_BINDIR}/engrave-config${@binconfig_suffix(d)}"
export ENLIGHTENMENT_CONFIG = "${STAGING_BINDIR}/enlightenment-config${@binconfig_suffix(d)}"
export EPSILON_CONFIG = "${STAGING_BINDIR}/epsilon-config${@binconfig_suffix(d)}"
export EPEG_CONFIG = "${STAGING_BINDIR}/epeg-config${@binconfig_suffix(d)}"
export ESMART_CONFIG = "${STAGING_BINDIR}/esmart-config${@binconfig_suffix(d)}"
export FREETYPE_CONFIG = "${STAGING_BINDIR}/freetype-config${@binconfig_suffix(d)}"
export IMLIB2_CONFIG = "${STAGING_BINDIR}/imlib2-config${@binconfig_suffix(d)}"

# Stage the recipe's libraries and public headers; for native builds
# also install the package's own binaries into staging.
do_stage_append () {
	for i in ${libraries}
	do
		oe_libinstall -C ${libdirectory} $i ${STAGING_LIBDIR}
	done
	for i in ${headers}
	do
		install -m 0644 ${libdirectory}/$i ${STAGING_INCDIR}
	done

	# Install binaries automatically for native builds
	if [ "${@binconfig_suffix(d)}" = "-native" ]
	then
		# Most EFL binaries start with the package name
		for i in src/bin/${SRCNAME}*
		do
			if [ -x $i -a -f $i ]
			then
				# Don't install anything with an extension (.so, etc)
				if echo $i | grep -v \\.
				then
					${HOST_SYS}-libtool --mode=install install -m 0755 $i ${STAGING_BINDIR}
				fi
			fi
		done
	fi
}

PACKAGES = "${SRCNAME} ${SRCNAME}-themes ${SRCNAME}-dev ${SRCNAME}-examples"
FILES_${SRCNAME} = "${libdir}/lib*.so*"
FILES_${SRCNAME}-themes = "${datadir}/${SRCNAME}/themes ${datadir}/${SRCNAME}/data ${datadir}/${SRCNAME}/fonts ${datadir}/${SRCNAME}/pointers ${datadir}/${SRCNAME}/images ${datadir}/${SRCNAME}/users ${datadir}/${SRCNAME}/images ${datadir}/${SRCNAME}/styles"
FILES_${SRCNAME}-dev += "${bindir}/${SRCNAME}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${datadir}/${SRCNAME}/include"
FILES_${SRCNAME}-examples = "${bindir} ${datadir}"

View File

@ -0,0 +1,5 @@
# gcc-3.4 blows up in gtktext with -frename-registers on arm-linux
python () {
	# Anonymous parse-time function: scrub the offending flag from CFLAGS.
	cflags = (bb.data.getVar('CFLAGS', d, 1) or '').replace('-frename-registers', '')
	bb.data.setVar('CFLAGS', cflags, d)
}

View File

@ -0,0 +1,57 @@
# Register this package's GConf schemas on the device at configure time.
gconf_postinst() {
	if [ "$1" = configure ]; then
		# Offline (image-build) configuration is not supported here.
		if [ "x$D" != "x" ]; then
			exit 1
		fi
		SCHEMA_LOCATION=/etc/gconf/schemas
		for SCHEMA in ${SCHEMA_FILES}; do
			if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
				HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
					gconftool-2 \
						--makefile-install-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
			fi
		done
	fi
}

# Unregister the schemas before the package is removed or upgraded.
gconf_prerm() {
	if [ "$1" = remove ] || [ "$1" = upgrade ]; then
		SCHEMA_LOCATION=/etc/gconf/schemas
		for SCHEMA in ${SCHEMA_FILES}; do
			if [ -e $SCHEMA_LOCATION/$SCHEMA ]; then
				HOME=/root GCONF_CONFIG_SOURCE=`gconftool-2 --get-default-source` \
					gconftool-2 \
						--makefile-uninstall-rule $SCHEMA_LOCATION/$SCHEMA > /dev/null
			fi
		done
	fi
}
# Attach the gconf postinst/prerm scriptlets to every package that
# actually ships *.schemas files.
python populate_packages_append () {
	import os.path, re
	packages = bb.data.getVar('PACKAGES', d, 1).split()
	workdir = bb.data.getVar('WORKDIR', d, 1)

	for pkg in packages:
		schema_dir = '%s/install/%s/etc/gconf/schemas' % (workdir, pkg)
		schemas = []
		schema_re = re.compile(".*\.schemas$")
		if os.path.exists(schema_dir):
			for f in os.listdir(schema_dir):
				if schema_re.match(f):
					schemas.append(f)
		if schemas != []:
			bb.note("adding gconf postinst and prerm scripts to %s" % pkg)
			# NOTE(review): SCHEMA_FILES is a single datastore variable, so the
			# last package with schemas wins when the scriptlets are expanded — confirm.
			bb.data.setVar('SCHEMA_FILES', " ".join(schemas), d)
			postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
			if not postinst:
				postinst = '#!/bin/sh\n'
			postinst += bb.data.getVar('gconf_postinst', d, 1)
			bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
			prerm = bb.data.getVar('pkg_prerm_%s' % pkg, d, 1) or bb.data.getVar('pkg_prerm', d, 1)
			if not prerm:
				prerm = '#!/bin/sh\n'
			prerm += bb.data.getVar('gconf_prerm', d, 1)
			bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)
}

View File

@ -0,0 +1,12 @@
# Strip NLS configure options and libintl/libiconv dependencies when the
# distro disables native language support.
python () {
	# Remove the NLS bits if USE_NLS is no.
	if bb.data.getVar('USE_NLS', d, 1) == 'no':
		cfg = oe_filter_out('^--(dis|en)able-nls$', bb.data.getVar('EXTRA_OECONF', d, 1) or "", d)
		cfg += " --disable-nls"
		depends = bb.data.getVar('DEPENDS', d, 1) or ""
		bb.data.setVar('DEPENDS', oe_filter_out('^(virtual/libiconv|virtual/libintl)$', depends, d), d)
		bb.data.setVar('EXTRA_OECONF', cfg, d)
}

DEPENDS =+ "gettext-native"
EXTRA_OECONF += "--enable-nls"

View File

@ -0,0 +1,20 @@
def gnome_verdir(v):
    """Return the MAJOR.MINOR directory component of a GNOME version.

    E.g. "2.10.1" -> "2.10".  Generalized to also accept bare
    "MAJOR.MINOR" versions (the original regex required a third
    component and crashed with AttributeError on a non-match);
    raises ValueError for unparseable input instead.
    """
    import re
    m = re.match(r"^([0-9]+)\.([0-9]+)", v)
    if m is None:
        raise ValueError("invalid GNOME version string: %r" % (v,))
    return "%s.%s" % (m.group(1), m.group(2))
SECTION ?= "x11/gnome"
# GNOME mirrors organize tarballs by MAJOR.MINOR subdirectory.
SRC_URI = "${GNOME_MIRROR}/${PN}/${@gnome_verdir("${PV}")}/${PN}-${PV}.tar.bz2"
DEPENDS += "gnome-common"

FILES_${PN} += "${datadir}/application-registry ${datadir}/mime-info \
	${datadir}/gnome-2.0"

inherit autotools pkgconfig gconf

# Make the gnome2 aclocal macros visible to autoreconf.
EXTRA_AUTORECONF += "-I ${STAGING_DIR}/${HOST_SYS}/share/aclocal/gnome2-macros"

gnome_stage_includes() {
	autotools_stage_includes
}

View File

@ -0,0 +1,16 @@
# Common metadata for GPE (GPE Palmtop Environment) recipes.
DEPENDS_prepend = "coreutils-native virtual/libintl intltool-native "
GPE_TARBALL_SUFFIX ?= "gz"
SRC_URI = "${GPE_MIRROR}/${PN}-${PV}.tar.${GPE_TARBALL_SUFFIX}"
FILES_${PN} += "${datadir}/gpe ${datadir}/application-registry"

inherit gettext

# GPE packages use plain Makefiles driven by PREFIX/DESTDIR.
gpe_do_compile() {
	oe_runmake PREFIX=${prefix}
}

gpe_do_install() {
	oe_runmake PREFIX=${prefix} DESTDIR=${D} install
}

EXPORT_FUNCTIONS do_compile do_install

View File

@ -0,0 +1,156 @@
# IceCream distributed compiling support
#
# We need to create a tar.bz2 of our toolchain and set
# ICECC_VERSION, ICECC_CXX and ICEC_CC
#
def create_env(bb,d):
    """
    Create a tar.bz of the current toolchain
    """
    # For native-native compilation no environment is needed if
    # host prefix is empty (let us duplicate the query for ease)
    prefix = bb.data.expand('${HOST_PREFIX}', d)
    if len(prefix) == 0:
        return ""

    import tarfile
    import socket
    import time
    import os

    ice_dir = bb.data.expand('${CROSS_DIR}', d)
    prefix = bb.data.expand('${HOST_PREFIX}' , d)
    distro = bb.data.expand('${DISTRO}', d)
    target_sys = bb.data.expand('${TARGET_SYS}', d)
    #float = bb.data.getVar('${TARGET_FPU}', d)
    # NOTE(review): 'float' shadows the builtin; FPU detection is stubbed out.
    float = "anyfloat"
    name = socket.gethostname()

    # Bail out silently unless the expected toolchain pieces exist.
    try:
        os.stat(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2')
        os.stat(ice_dir + '/' + target_sys + '/bin/g++')
    except:
        return ""

    # NOTE(review): compiler and glibc versions are hard-coded below
    # (3.4.3 / 2.3.3) and cc1plus is taken from an arm-linux path — these
    # break for any other toolchain; confirm before reuse.
    VERSION = '3.4.3'
    cross_name = prefix + distro + target_sys + float +VERSION+ name
    tar_file = ice_dir + '/ice/' + cross_name + '.tar.bz2'

    # Reuse a previously generated environment tarball when present.
    try:
        os.stat(tar_file)
        return tar_file
    except:
        try:
            os.makedirs(ice_dir+'/ice')
        except:
            pass

    # FIXME find out the version of the compiler
    tar = tarfile.open(tar_file, 'w:bz2')
    tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
        target_sys + 'cross/lib/ld-linux.so.2')
    tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
        target_sys + 'cross/lib/ld-2.3.3.so')
    tar.add(ice_dir + '/' + target_sys + '/lib/libc-2.3.3.so',
        target_sys + 'cross/lib/libc-2.3.3.so')
    tar.add(ice_dir + '/' + target_sys + '/lib/libc.so.6',
        target_sys + 'cross/lib/libc.so.6')
    tar.add(ice_dir + '/' + target_sys + '/bin/gcc',
        target_sys + 'cross/usr/bin/gcc')
    tar.add(ice_dir + '/' + target_sys + '/bin/g++',
        target_sys + 'cross/usr/bin/g++')
    tar.add(ice_dir + '/' + target_sys + '/bin/as',
        target_sys + 'cross/usr/bin/as')
    tar.add(ice_dir + '/lib/gcc/' + target_sys +'/'+ VERSION + '/specs',
        target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/specs')
    tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1',
        target_sys + 'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1')
    tar.add(ice_dir + '/libexec/gcc/arm-linux/' + VERSION + '/cc1plus',
        target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1plus')
    tar.close()
    return tar_file
def create_path(compilers, type, bb, d):
    """
    Create Symlinks for the icecc in the staging directory
    """
    import os
    # NOTE(review): 'type' shadows the builtin; it names the "cross" or
    # "native" subdirectory of the icecc symlink farm.
    staging = bb.data.expand('${STAGING_DIR}', d) + "/ice/" + type
    icecc = bb.data.getVar('ICECC_PATH', d)

    # Create the dir if necessary
    try:
        os.stat(staging)
    except:
        os.makedirs(staging)

    # Each requested compiler name becomes a symlink to the icecc wrapper.
    for compiler in compilers:
        gcc_path = staging + "/" + compiler
        try:
            os.stat(gcc_path)
        except:
            os.symlink(icecc, gcc_path)

    # Returned with trailing ':' so it can be prepended to $PATH directly.
    return staging + ":"
def use_icc_version(bb,d):
    """Return "yes" when a packaged toolchain (ICECC_VERSION) should be
    shipped to remote nodes — i.e. real cross builds only."""
    # Native-native builds (empty host prefix) use the local compiler.
    prefix = bb.data.expand('${HOST_PREFIX}', d)
    if len(prefix) == 0:
        return "no"

    native = bb.data.expand('${PN}', d)
    blacklist = [ "-cross", "-native" ]
    for black in blacklist:
        if black in native:
            return "no"

    return "yes"

def icc_path(bb,d,compile):
    """Return the PATH prefix pointing at the icecc symlink farm, or ""
    to disable icecc for blacklisted recipes.  'compile' selects the
    cross compiler set (True) vs the native one."""
    native = bb.data.expand('${PN}', d)
    # NOTE(review): "ulibc" presumably means uclibc — confirm the blacklist entry.
    blacklist = [ "ulibc", "glibc", "ncurses" ]
    for black in blacklist:
        if black in native:
            return ""

    # -native and -cross recipes always use the native compiler set.
    if "-native" in native:
        compile = False
    if "-cross" in native:
        compile = False

    prefix = bb.data.expand('${HOST_PREFIX}', d)
    if compile and len(prefix) != 0:
        return create_path( [prefix+"gcc", prefix+"g++"], "cross", bb, d )
    elif not compile or len(prefix) == 0:
        return create_path( ["gcc", "g++"], "native", bb, d)

def icc_version(bb,d):
    # Thin alias: the environment tarball path doubles as the version id.
    return create_env(bb,d)
#
# set the IceCream environment variables
do_configure_prepend() {
	# configure always runs with the plain native compiler set.
	export PATH=${@icc_path(bb,d,False)}$PATH
	export ICECC_CC="gcc"
	export ICECC_CXX="g++"
}

do_compile_prepend() {
	export PATH=${@icc_path(bb,d,True)}$PATH
	export ICECC_CC="${HOST_PREFIX}gcc"
	export ICECC_CXX="${HOST_PREFIX}g++"

	# Ship the packaged toolchain to remote nodes for real cross builds.
	if [ "${@use_icc_version(bb,d)}" = "yes" ]; then
		export ICECC_VERSION="${@icc_version(bb,d)}"
	fi
}

View File

@ -0,0 +1,72 @@
inherit rootfs_ipk

# Images are generally built explicitly, do not need to be part of world.
EXCLUDE_FROM_WORLD = "1"

USE_DEVFS ?= "0"

DEPENDS += "makedevs-native"

# Collect the extra build dependencies declared per image filesystem
# type (IMAGE_DEPENDS_<type>), e.g. the tools to generate that format.
def get_image_deps(d):
	import bb
	str = ""
	for type in (bb.data.getVar('IMAGE_FSTYPES', d, 1) or "").split():
		deps = bb.data.getVar('IMAGE_DEPENDS_%s' % type, d) or ""
		if deps:
			str += " %s" % deps
	return str

DEPENDS += "${@get_image_deps(d)}"

IMAGE_DEVICE_TABLE ?= "${@bb.which(bb.data.getVar('BBPATH', d, 1), 'files/device_table-minimal.txt')}"
IMAGE_POSTPROCESS_COMMAND ?= ""
# Must call real_do_rootfs() from inside here, rather than as a separate
# task, so that we have a single fakeroot context for the whole process.
fakeroot do_rootfs () {
	set -x
	rm -rf ${IMAGE_ROOTFS}

	# Populate static /dev unless the image uses devfs.
	if [ "${USE_DEVFS}" != "1" ]; then
		mkdir -p ${IMAGE_ROOTFS}/dev
		makedevs -r ${IMAGE_ROOTFS} -D ${IMAGE_DEVICE_TABLE}
	fi

	real_do_rootfs
	insert_feed_uris

	# Drop the build-time package list; it is meaningless on the device.
	rm -f ${IMAGE_ROOTFS}${libdir}/ipkg/lists/oe

	${IMAGE_PREPROCESS_COMMAND}

	export TOPDIR=${TOPDIR}

	# Generate one image per requested filesystem type, re-entering
	# fakeroot when this task is not already running under it.
	for type in ${IMAGE_FSTYPES}; do
		if test -z "$FAKEROOTKEY"; then
			fakeroot -i ${TMPDIR}/fakedb.image bbimage -t $type -e ${FILE}
		else
			bbimage -n "${IMAGE_NAME}" -t "$type" -e "${FILE}"
		fi
	done

	${IMAGE_POSTPROCESS_COMMAND}
}

# Write one ipkg feed configuration file per entry in FEED_URIS
# (entries have the form "name##uri").
insert_feed_uris () {
	echo "Building feeds for [${DISTRO}].."
	for line in ${FEED_URIS}
	do
		# strip leading and trailing spaces/tabs, then split into name and uri
		line_clean="`echo "$line"|sed 's/^[ \t]*//;s/[ \t]*$//'`"
		feed_name="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\1/p'`"
		feed_uri="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\2/p'`"

		echo "Added $feed_name feed with URL $feed_uri"

		# insert new feed-sources
		echo "src/gz $feed_name $feed_uri" >> ${IMAGE_ROOTFS}/etc/ipkg/${feed_name}-feed.conf
	done
}

View File

@ -0,0 +1,26 @@
#
# set the ARCH environment variable for kernel compilation (including
# modules). return value must match one of the architecture directories
# in the kernel source "arch" directory
#
valid_archs = "alpha cris ia64 m68knommu ppc sh \
	sparc64 x86_64 arm h8300 m32r mips \
	ppc64 sh64 um arm26 i386 m68k \
	parisc s390 sparc v850"

# Map an OE TARGET_ARCH to the kernel's arch/ directory name.
def map_kernel_arch(a, d):
	import bb, re

	valid_archs = bb.data.getVar('valid_archs', d, 1).split()

	if re.match('(i.86|athlon)$', a): return 'i386'
	elif re.match('arm26$', a): return 'arm26'
	elif re.match('armeb$', a): return 'arm'
	elif re.match('powerpc$', a): return 'ppc'
	elif re.match('mipsel$', a): return 'mips'
	elif a in valid_archs: return a
	else:
		# NOTE(review): falls through returning None after the error —
		# ARCH would then expand to "None"; confirm bb.error aborts here.
		bb.error("cannot map '%s' to a linux kernel architecture" % a)

export ARCH = "${@map_kernel_arch(bb.data.getVar('TARGET_ARCH', d, 1), d)}"

View File

@ -0,0 +1,382 @@
inherit module_strip

PROVIDES += "virtual/kernel"
# depmod must match the kernel's major version (2.4 vs 2.6 module formats).
DEPENDS += "virtual/${TARGET_PREFIX}depmod-${@get_kernelmajorversion('${PV}')} virtual/${TARGET_PREFIX}gcc${KERNEL_CCSUFFIX} update-modules"

inherit kernel-arch

export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}"
KERNEL_IMAGETYPE = "zImage"

# update-alternatives priority derived from the last version component.
KERNEL_PRIORITY = "${@bb.data.getVar('PV',d,1).split('-')[0].split('.')[-1]}"

# Allow recipes to pin an older compiler/linker for the kernel only.
KERNEL_CCSUFFIX ?= ""
KERNEL_LDSUFFIX ?= ""
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX}"
KERNEL_LD = "${LD}${KERNEL_LDSUFFIX}"

KERNEL_OUTPUT = "arch/${ARCH}/boot/${KERNEL_IMAGETYPE}"
KERNEL_IMAGEDEST = "boot"

#
# configuration
#
export CMDLINE_CONSOLE = "console=${@bb.data.getVar("KERNEL_CONSOLE",d,1) or "ttyS0"}"
# parse kernel ABI version out of <linux/version.h>
def get_kernelversion(p):
    """Extract the kernel ABI version from a <linux/version.h> file.

    Looks for the '#define UTS_RELEASE "..."' line and returns the
    quoted release string; returns None when the file cannot be opened
    or contains no such define.
    """
    import re
    release_re = re.compile('#define UTS_RELEASE "(.*)"')
    try:
        version_h = open(p, 'r')
    except IOError:
        return None
    contents = version_h.readlines()
    version_h.close()
    for line in contents:
        found = release_re.match(line)
        if found:
            return found.group(1)
    return None
def get_kernelmajorversion(p):
    """Return the "MAJOR.MINOR" prefix of a kernel version string, or
    None when the string does not begin with two dotted numbers."""
    import re
    match = re.match(r"([0-9]+\.[0-9]+).*", p)
    if match:
        return match.group(1)
    return None
# ABI version as reported by the configured source tree itself.
KERNEL_VERSION = "${@get_kernelversion('${S}/include/linux/version.h')}"
KERNEL_MAJOR_VERSION = "${@get_kernelmajorversion('${KERNEL_VERSION}')}"
KERNEL_LOCALVERSION ?= ""

# kernels are generally machine specific
PACKAGE_ARCH = "${MACHINE_ARCH}"
kernel_do_compile() {
	# Userspace flags must not leak into the kernel build.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake include/linux/version.h CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	# 'dep' is needed for 2.4-era trees; harmless otherwise.
	oe_runmake dep CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	oe_runmake ${KERNEL_IMAGETYPE} CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
		oe_runmake modules  CC="${KERNEL_CC}" LD="${KERNEL_LD}"
	else
		oenote "no modules to compile"
	fi
}
# Stage the headers, configuration and build metadata that external
# module recipes (see module-base) read from STAGING_KERNEL_DIR.
kernel_do_stage() {
	ASMDIR=`readlink include/asm`

	mkdir -p ${STAGING_KERNEL_DIR}/include/$ASMDIR
	cp -fR include/$ASMDIR/* ${STAGING_KERNEL_DIR}/include/$ASMDIR/
	ln -sf $ASMDIR ${STAGING_KERNEL_DIR}/include/asm

	mkdir -p ${STAGING_KERNEL_DIR}/include/asm-generic
	cp -fR include/asm-generic/* ${STAGING_KERNEL_DIR}/include/asm-generic/

	mkdir -p ${STAGING_KERNEL_DIR}/include/linux
	cp -fR include/linux/* ${STAGING_KERNEL_DIR}/include/linux/

	mkdir -p ${STAGING_KERNEL_DIR}/include/net
	cp -fR include/net/* ${STAGING_KERNEL_DIR}/include/net/

	mkdir -p ${STAGING_KERNEL_DIR}/include/pcmcia
	cp -fR include/pcmcia/* ${STAGING_KERNEL_DIR}/include/pcmcia/

	if [ -d drivers/sound ]; then
		# 2.4 alsa needs some headers from this directory
		mkdir -p ${STAGING_KERNEL_DIR}/include/drivers/sound
		cp -fR drivers/sound/*.h ${STAGING_KERNEL_DIR}/include/drivers/sound/
	fi

	install -m 0644 .config ${STAGING_KERNEL_DIR}/config-${PV}${KERNEL_LOCALVERSION}
	ln -sf config-${PV}${KERNEL_LOCALVERSION} ${STAGING_KERNEL_DIR}/.config
	ln -sf config-${PV}${KERNEL_LOCALVERSION} ${STAGING_KERNEL_DIR}/kernel-config

	# Metadata files that module-base reads back via base_read_file().
	echo "${KERNEL_VERSION}" >${STAGING_KERNEL_DIR}/kernel-abiversion
	echo "${S}" >${STAGING_KERNEL_DIR}/kernel-source
	echo "${KERNEL_CCSUFFIX}" >${STAGING_KERNEL_DIR}/kernel-ccsuffix
	echo "${KERNEL_LDSUFFIX}" >${STAGING_KERNEL_DIR}/kernel-ldsuffix
	[ -e Rules.make ] && install -m 0644 Rules.make ${STAGING_KERNEL_DIR}/
	[ -e Makefile ] && install -m 0644 Makefile ${STAGING_KERNEL_DIR}/

	# Check if arch/${ARCH}/Makefile exists and install it
	if [ -e arch/${ARCH}/Makefile ]; then
		install -d ${STAGING_KERNEL_DIR}/arch/${ARCH}
		install -m 0644 arch/${ARCH}/Makefile ${STAGING_KERNEL_DIR}/arch/${ARCH}
	fi

	cp -fR include/config* ${STAGING_KERNEL_DIR}/include/
	install -m 0644 ${KERNEL_OUTPUT} ${STAGING_KERNEL_DIR}/${KERNEL_IMAGETYPE}
	install -m 0644 System.map ${STAGING_KERNEL_DIR}/System.map-${PV}${KERNEL_LOCALVERSION}
	[ -e Module.symvers ] && install -m 0644 Module.symvers ${STAGING_KERNEL_DIR}/

	cp -fR scripts ${STAGING_KERNEL_DIR}/
}
kernel_do_install() {
	# Userspace flags must not leak into the kernel build.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then
		# DEPMOD=echo: depmod runs on the device in postinst, not here.
		oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" modules_install
	else
		oenote "no modules to install"
	fi

	install -d ${D}/${KERNEL_IMAGEDEST}
	install -d ${D}/boot
	install -m 0644 ${KERNEL_OUTPUT} ${D}/${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${PV}${KERNEL_LOCALVERSION}
	install -m 0644 System.map ${D}/boot/System.map-${PV}${KERNEL_LOCALVERSION}
	install -m 0644 .config ${D}/boot/config-${PV}${KERNEL_LOCALVERSION}
	install -d ${D}/etc/modutils

	# Check if scripts/genksyms exists and if so, build it
	if [ -e scripts/genksyms/ ]; then
		oe_runmake SUBDIRS="scripts/genksyms"
	fi

	# NOTE(review): writing into STAGING_KERNEL_DIR from do_install looks
	# like staging work leaking into install — confirm it is intentional.
	cp -fR scripts ${STAGING_KERNEL_DIR}/
}
kernel_do_configure() {
	# Accept defaults for any config options new to this kernel.
	yes '' | oe_runmake oldconfig
}

# Manage /boot/<imagetype> as an alternative so several kernels can coexist.
pkg_postinst_kernel () {
	update-alternatives --install /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE} ${KERNEL_IMAGETYPE} /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${PV}${KERNEL_LOCALVERSION} ${KERNEL_PRIORITY} || true
}

pkg_postrm_kernel () {
	update-alternatives --remove ${KERNEL_IMAGETYPE} /${KERNEL_IMAGEDEST}/${KERNEL_IMAGETYPE}-${PV}${KERNEL_LOCALVERSION} || true
}

inherit cml1

EXPORT_FUNCTIONS do_compile do_install do_stage do_configure

# 'kernel' is an empty meta package pulling in the versioned image.
PACKAGES = "kernel kernel-image kernel-dev"
FILES = ""
FILES_kernel-image = "/boot/${KERNEL_IMAGETYPE}*"
FILES_kernel-dev = "/boot/System.map* /boot/config*"
RDEPENDS_kernel = "kernel-image-${KERNEL_VERSION}"
PKG_kernel-image = "kernel-image-${KERNEL_VERSION}"
ALLOW_EMPTY_kernel = "1"
ALLOW_EMPTY_kernel-image = "1"
# Common postinst for module packages: regenerate module dependencies,
# offline (image build, $D set) via the cross depmod, else on the device.
pkg_postinst_modules () {
	if [ -n "$D" ]; then
		${HOST_PREFIX}depmod -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${PV}${KERNEL_LOCALVERSION} ${KERNEL_VERSION}
	else
		depmod -A
		update-modules || true
	fi
}

pkg_postrm_modules () {
	update-modules || true
}

# printf-style fragment appended to a module's postinst when autoloading
# is requested (%s is replaced with the module name).
autoload_postinst_fragment() {
	if [ x"$D" = "x" ]; then
		modprobe %s || true
	fi
}

# autoload defaults (alphabetically sorted)
module_autoload_hidp = "hidp"
module_autoload_ipv6 = "ipv6"
module_autoload_ipsec = "ipsec"
module_autoload_ircomm-tty = "ircomm-tty"
module_autoload_rfcomm = "rfcomm"
module_autoload_sa1100-rtc = "sa1100-rtc"

# alias defaults (alphabetically sorted)
module_conf_af_packet = "alias net-pf-17 af_packet"
module_conf_bluez = "alias net-pf-31 bluez"
module_conf_bnep = "alias bt-proto-4 bnep"
module_conf_hci_uart = "alias tty-ldisc-15 hci_uart"
module_conf_l2cap = "alias bt-proto-0 l2cap"
module_conf_sco = "alias bt-proto-2 sco"
module_conf_rfcomm = "alias bt-proto-3 rfcomm"
# Split /lib/modules into one kernel-module-* package per module, wiring
# up autoload/modconf fragments, inter-module RDEPENDS from depmod
# output, and a kernel-modules meta package.
python populate_packages_prepend () {
	# Read the .modinfo ELF section of a module into a key/value dict.
	def extract_modinfo(file):
		import os, re
		# NOTE(review): os.tmpnam is race-prone (and gone in py3) — revisit.
		tmpfile = os.tmpnam()
		cmd = "PATH=\"%s\" %sobjcopy -j .modinfo -O binary %s %s" % (bb.data.getVar("PATH", d, 1), bb.data.getVar("HOST_PREFIX", d, 1) or "", file, tmpfile)
		os.system(cmd)
		f = open(tmpfile)
		# The section is a series of NUL-separated key=value strings.
		l = f.read().split("\000")
		f.close()
		os.unlink(tmpfile)
		exp = re.compile("([^=]+)=(.*)")
		vals = {}
		for i in l:
			m = exp.match(i)
			if not m:
				continue
			vals[m.group(1)] = m.group(2)
		return vals

	# Run the cross depmod in dry-run mode and parse its dependency
	# listing into { module path: [dependency paths] }.
	def parse_depmod():
		import os, re

		dvar = bb.data.getVar('D', d, 1)
		if not dvar:
			bb.error("D not defined")
			return

		kernelver = bb.data.getVar('PV', d, 1) + bb.data.getVar('KERNEL_LOCALVERSION', d, 1)
		kernelver_stripped = kernelver
		# handhelds.org (-hh) kernels encode extra suffixes; strip them.
		m = re.match('^(.*-hh.*)[\.\+].*$', kernelver)
		if m:
			kernelver_stripped = m.group(1)
		path = bb.data.getVar("PATH", d, 1)
		host_prefix = bb.data.getVar("HOST_PREFIX", d, 1) or ""

		cmd = "PATH=\"%s\" %sdepmod -n -a -r -b %s -F %s/boot/System.map-%s %s" % (path, host_prefix, dvar, dvar, kernelver, kernelver_stripped)
		f = os.popen(cmd, 'r')

		deps = {}
		# pattern1/2 match "mod.o: dep..." lines (2 = backslash-continued),
		# pattern3/4 match continuation lines (3 = continued further).
		pattern0 = "^(.*\.k?o):..*$"
		pattern1 = "^(.*\.k?o):\s*(.*\.k?o)\s*$"
		pattern2 = "^(.*\.k?o):\s*(.*\.k?o)\s*\\\$"
		pattern3 = "^\t(.*\.k?o)\s*\\\$"
		pattern4 = "^\t(.*\.k?o)\s*$"

		line = f.readline()
		while line:
			if not re.match(pattern0, line):
				line = f.readline()
				continue
			m1 = re.match(pattern1, line)
			if m1:
				deps[m1.group(1)] = m1.group(2).split()
			else:
				m2 = re.match(pattern2, line)
				if m2:
					deps[m2.group(1)] = m2.group(2).split()
					line = f.readline()
					m3 = re.match(pattern3, line)
					while m3:
						deps[m2.group(1)].extend(m3.group(1).split())
						line = f.readline()
						m3 = re.match(pattern3, line)
					# NOTE(review): if the final line matches neither
					# pattern3 nor pattern4, m4 is None and this raises —
					# presumably depmod output always ends the list with a
					# plain continuation line; confirm.
					m4 = re.match(pattern4, line)
					deps[m2.group(1)].extend(m4.group(1).split())
			line = f.readline()
		f.close()
		return deps

	# Translate a module's depmod dependencies into package names.
	def get_dependencies(file, pattern, format):
		file = file.replace(bb.data.getVar('D', d, 1) or '', '', 1)

		if module_deps.has_key(file):
			import os.path, re
			dependencies = []
			for i in module_deps[file]:
				m = re.match(pattern, os.path.basename(i))
				if not m:
					continue
				on = legitimize_package_name(m.group(1))
				dependency_pkg = format % on
				v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
				if v == "1":
					kv = bb.data.getVar("KERNEL_MAJOR_VERSION", d, 1)
					dependency_pkg = "%s-%s" % (dependency_pkg, kv)
				dependencies.append(dependency_pkg)
			return dependencies
		return []

	# do_split_packages hook: per-module package metadata fixup.
	def frob_metadata(file, pkg, pattern, format, basename):
		import re
		vals = extract_modinfo(file)

		dvar = bb.data.getVar('D', d, 1)

		# If autoloading is requested, output /etc/modutils/<name> and append
		# appropriate modprobe commands to the postinst
		autoload = bb.data.getVar('module_autoload_%s' % basename, d, 1)
		if autoload:
			name = '%s/etc/modutils/%s' % (dvar, basename)
			f = open(name, 'w')
			for m in autoload.split():
				f.write('%s\n' % m)
			f.close()
			postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1)
			if not postinst:
				bb.fatal("pkg_postinst_%s not defined" % pkg)
			postinst += bb.data.getVar('autoload_postinst_fragment', d, 1) % autoload
			bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

		# Write out any modconf fragment
		modconf = bb.data.getVar('module_conf_%s' % basename, d, 1)
		if modconf:
			name = '%s/etc/modutils/%s.conf' % (dvar, basename)
			f = open(name, 'w')
			f.write("%s\n" % modconf)
			f.close()

		files = bb.data.getVar('FILES_%s' % pkg, d, 1)
		files = "%s /etc/modutils/%s /etc/modutils/%s.conf" % (files, basename, basename)
		bb.data.setVar('FILES_%s' % pkg, files, d)

		if vals.has_key("description"):
			old_desc = bb.data.getVar('DESCRIPTION_' + pkg, d, 1) or ""
			bb.data.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"], d)

		rdepends_str = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
		if rdepends_str:
			rdepends = rdepends_str.split()
		else:
			rdepends = []
		rdepends.extend(get_dependencies(file, pattern, format))
		bb.data.setVar('RDEPENDS_' + pkg, ' '.join(rdepends), d)

	module_deps = parse_depmod()
	module_regex = '^(.*)\.k?o$'
	module_pattern = 'kernel-module-%s'

	postinst = bb.data.getVar('pkg_postinst_modules', d, 1)
	postrm = bb.data.getVar('pkg_postrm_modules', d, 1)
	do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='update-modules kernel-image-%s' % bb.data.getVar("KERNEL_VERSION", d, 1))

	# Build the kernel-modules meta package: it RDEPENDS on every split
	# module package that did not appear in depmod output (blacklisted
	# entries are reachable as dependencies of other modules already).
	import re, os
	metapkg = "kernel-modules"
	bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
	bb.data.setVar('FILES_' + metapkg, "", d)
	blacklist = [ 'kernel-dev', 'kernel-image' ]
	for l in module_deps.values():
		for i in l:
			pkg = module_pattern % legitimize_package_name(re.match(module_regex, os.path.basename(i)).group(1))
			blacklist.append(pkg)
	metapkg_rdepends = []
	packages = bb.data.getVar('PACKAGES', d, 1).split()
	for pkg in packages[1:]:
		if not pkg in blacklist and not pkg in metapkg_rdepends:
			metapkg_rdepends.append(pkg)
	bb.data.setVar('RDEPENDS_' + metapkg, ' '.join(metapkg_rdepends), d)
	bb.data.setVar('DESCRIPTION_' + metapkg, 'Kernel modules meta package', d)
	packages.append(metapkg)
	bb.data.setVar('PACKAGES', ' '.join(packages), d)

	# Optionally version every kernel-module-* package name so modules
	# for several kernels can be installed in parallel.
	v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
	if v == "1":
		kv = bb.data.getVar("KERNEL_MAJOR_VERSION", d, 1)
		packages = bb.data.getVar("PACKAGES", d, 1)
		module_re = re.compile("^kernel-module-")
		for p in packages.split():
			if not module_re.match(p):
				continue
			pkg = bb.data.getVar("PKG_%s" % p, d, 1) or p
			newpkg = "%s-%s" % (pkg, kv)
			bb.data.setVar("PKG_%s" % p, newpkg, d)
			rprovides = bb.data.getVar("RPROVIDES_%s" % p, d, 1)
			if rprovides:
				rprovides = "%s %s" % (rprovides, pkg)
			else:
				rprovides = pkg
			bb.data.setVar("RPROVIDES_%s" % p, rprovides, d)
}

View File

@ -0,0 +1,9 @@
# Library recipe layout: runtime libs in ${PN}, headers/static libs in
# -dev, and executables split out into a -bin package.
PACKAGES = "${PN} ${PN}-dev ${PN}-doc ${PN}-bin"

FILES_${PN} = "${libexecdir} ${libdir}/lib*.so.* \
	${sysconfdir} ${sharedstatedir} ${localstatedir} \
	/lib/*.so* ${datadir}/${PN} ${libdir}/${PN}"
FILES_${PN}-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \
	${libdir}/*.a ${libdir}/pkgconfig /lib/*.a /lib/*.o \
	${datadir}/aclocal ${bindir}/*-config"
FILES_${PN}-bin = "${bindir} ${sbindir} /bin /sbin"

View File

@ -0,0 +1,19 @@
def get_kernelmajorversion(p):
    """Strip a kernel version string down to its "MAJOR.MINOR" prefix.

    Returns None when *p* does not start with two dot-separated numbers.
    """
    import re
    found = re.search(r"^([0-9]+\.[0-9]+)", p)
    if found is None:
        return None
    return found.group(1)
def linux_module_packages(s, d):
    """Map a whitespace-separated list of kernel module names to their
    kernel-module-* package names, appending a "-MAJOR.MINOR" suffix
    when PARALLEL_INSTALL_MODULES is enabled and a staged kernel ABI
    version file exists."""
    import bb, os.path
    suffix = ""
    if bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) == "1":
        abiver_file = bb.data.expand('${STAGING_KERNEL_DIR}/kernel-abiversion', d)
        if os.path.exists(abiver_file):
            suffix = "-%s" % (get_kernelmajorversion(base_read_file(abiver_file)))
    pkg_names = []
    for module in s.split():
        normalized = module.lower().replace('_', '-').replace('@', '+')
        pkg_names.append("kernel-module-%s%s" % (normalized, suffix))
    return " ".join(pkg_names)
# that's all

View File

@ -0,0 +1,17 @@
inherit module_strip

inherit kernel-arch

export OS = "${TARGET_OS}"
export CROSS_COMPILE = "${TARGET_PREFIX}"

# Read back the metadata the kernel recipe staged (see kernel_do_stage).
export KERNEL_VERSION = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-abiversion')}"
export KERNEL_SOURCE = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-source')}"
# 2.6+ kernels use .ko module objects, 2.4 uses .o.
KERNEL_OBJECT_SUFFIX = "${@[".o", ".ko"][base_read_file('${STAGING_KERNEL_DIR}/kernel-abiversion') > "2.6.0"]}"
KERNEL_CCSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ccsuffix')}"
KERNEL_LDSUFFIX = "${@base_read_file('${STAGING_KERNEL_DIR}/kernel-ldsuffix')}"
KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc${KERNEL_CCSUFFIX}"
KERNEL_LD = "${LD}${KERNEL_LDSUFFIX}"

# kernel modules are generally machine specific
PACKAGE_ARCH = "${MACHINE_ARCH}"

View File

@ -0,0 +1,51 @@
# External kernel module recipes: tie the package to the exact kernel ABI.
RDEPENDS += "kernel (${KERNEL_VERSION})"
DEPENDS += "virtual/kernel"

inherit module-base

# When PARALLEL_INSTALL_MODULES is set, version every package name and
# RPROVIDE the unversioned name for compatibility.
python populate_packages_prepend() {
	v = bb.data.getVar("PARALLEL_INSTALL_MODULES", d, 1) or "0"
	if v == "1":
		kv = bb.data.getVar("KERNEL_VERSION", d, 1)
		packages = bb.data.getVar("PACKAGES", d, 1)

		for p in packages.split():
			pkg = bb.data.getVar("PKG_%s" % p, d, 1) or p
			newpkg = "%s-%s" % (pkg, kv)
			bb.data.setVar("PKG_%s" % p, newpkg, d)
			rprovides = bb.data.getVar("RPROVIDES_%s" % p, d, 1)
			if rprovides:
				rprovides = "%s %s" % (rprovides, pkg)
			else:
				rprovides = pkg
			bb.data.setVar("RPROVIDES_%s" % p, rprovides, d)
}
module_do_compile() {
	# Userspace flags must not leak into the module build.
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	oe_runmake KERNEL_PATH=${STAGING_KERNEL_DIR}   \
		   KERNEL_SRC=${STAGING_KERNEL_DIR}    \
		   KERNEL_VERSION=${KERNEL_VERSION}    \
		   CC="${KERNEL_CC}" LD="${KERNEL_LD}" \
		   ${MAKE_TARGETS}
}

module_do_install() {
	unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS
	# DEPMOD=echo: module dependencies are regenerated in postinst instead.
	oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" CC="${KERNEL_CC}" LD="${KERNEL_LD}" modules_install
}

pkg_postinst_append () {
	# Offline (image-time) configuration is not supported.
	if [ -n "$D" ]; then
		exit 1
	fi
	depmod -A
	update-modules || true
}

pkg_postrm_append () {
	update-modules || true
}

EXPORT_FUNCTIONS do_compile do_install

FILES_${PN} = "/etc /lib/modules"

View File

@ -0,0 +1,18 @@
#DEPENDS_append = " module-strip"
# Strip debug symbols from every packaged kernel module object.
do_strip_modules () {
	for p in ${PACKAGES}; do
		if test -e ${WORKDIR}/install/$p/lib/modules; then
			modules="`find ${WORKDIR}/install/$p/lib/modules -name \*${KERNEL_OBJECT_SUFFIX}`"
			if [ -n "$modules" ]; then
				# -g: remove debugging symbols only, keep the symbol table.
				${STRIP} -v -g $modules
#				NM="${CROSS_DIR}/bin/${HOST_PREFIX}nm" OBJCOPY="${CROSS_DIR}/bin/${HOST_PREFIX}objcopy" strip_module $modules
			fi
		fi
	done
}

python do_package_append () {
	# Honour the recipe-level opt-out before stripping.
	if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1) != '1'):
		bb.build.exec_func('do_strip_modules', d)
}

View File

@ -0,0 +1,53 @@
# Common metadata for Mozilla-based recipes (client.mk build system).
SECTION = "x11/utils"
DEPENDS += "gnu-config-native virtual/libintl xt libxi \
	    zip-native gtk+ orbit2 libidl-native"
LICENSE = "MPL NPL"
SRC_URI += "file://mozconfig"

inherit gettext

EXTRA_OECONF = "--target=${TARGET_SYS} --host=${BUILD_SYS} \
		--build=${BUILD_SYS} --prefix=${prefix}"
EXTRA_OEMAKE = "'HOST_LIBIDL_LIBS=${HOST_LIBIDL_LIBS}' \
		'HOST_LIBIDL_CFLAGS=${HOST_LIBIDL_CFLAGS}'"
SELECTED_OPTIMIZATION = "-Os -fsigned-char -fno-strict-aliasing"

export CROSS_COMPILE = "1"
export MOZCONFIG = "${WORKDIR}/mozconfig"
export MOZ_OBJDIR = "${S}"

export CONFIGURE_ARGS = "${EXTRA_OECONF}"
# The build-host libIDL is located through pkg-config in staging.
export HOST_LIBIDL_CFLAGS = "`${HOST_LIBIDL_CONFIG} --cflags`"
export HOST_LIBIDL_LIBS = "`${HOST_LIBIDL_CONFIG} --libs`"
export HOST_LIBIDL_CONFIG = "PKG_CONFIG_PATH=${STAGING_BINDIR}/../share/pkgconfig pkg-config libIDL-2.0"
# Host-side tools are compiled with the build machine's toolchain.
export HOST_CC = "${BUILD_CC}"
export HOST_CXX = "${BUILD_CXX}"
export HOST_CFLAGS = "${BUILD_CFLAGS}"
export HOST_CXXFLAGS = "${BUILD_CXXFLAGS}"
export HOST_LDFLAGS = "${BUILD_LDFLAGS}"
export HOST_RANLIB = "${BUILD_RANLIB}"
export HOST_AR = "${BUILD_AR}"
# Refresh every config.guess/config.sub in the tree with the staged
# gnu-config copies (cross-compile triplet recognition), then let
# client.mk generate Makefile and config.status.
mozilla_do_configure() {
	(
		# Subshell so 'set -e' does not leak into the rest of the task.
		set -e
		for cg in `find ${S} -name config.guess`; do
			install -m 0755 \
				${STAGING_BINDIR}/../share/gnu-config/config.guess \
				${STAGING_BINDIR}/../share/gnu-config/config.sub \
				`dirname $cg`/
		done
	)
	oe_runmake -f client.mk ${MOZ_OBJDIR}/Makefile \
		${MOZ_OBJDIR}/config.status
}
# Drive the whole build through Mozilla's client.mk meta-makefile.
mozilla_do_compile() {
	oe_runmake -f client.mk build_all
}
# Both spellings are passed because parts of the Mozilla makefiles use
# DESTDIR and others use destdir.
mozilla_do_install() {
	oe_runmake DESTDIR="${D}" destdir="${D}" install
}
EXPORT_FUNCTIONS do_configure do_compile do_install

View File

@ -0,0 +1,3 @@
# Re-home stamps/workdir/staged-kernel under a ${PACKAGE_ARCH}-${HOST_OS}
# subtree so these builds do not collide with the default target layout.
STAMP = "${TMPDIR}/stamps/${PACKAGE_ARCH}-${HOST_OS}/${PF}"
WORKDIR = "${TMPDIR}/work/${PACKAGE_ARCH}-${HOST_OS}/${PF}"
STAGING_KERNEL_DIR = "${STAGING_DIR}/${PACKAGE_ARCH}-${HOST_OS}/kernel"

View File

@ -0,0 +1,68 @@
inherit base

# Native packages are built indirectly via dependency,
# no need for them to be a direct target of 'world'
EXCLUDE_FROM_WORLD = "1"

# Native builds are staged, not packaged.
PACKAGES = ""

# Build for the build machine: fold TARGET/HOST down to BUILD.
TARGET_ARCH = "${BUILD_ARCH}"
TARGET_OS = "${BUILD_OS}"
TARGET_VENDOR = "${BUILD_VENDOR}"
TARGET_PREFIX = "${BUILD_PREFIX}"
TARGET_CC_ARCH = "${BUILD_CC_ARCH}"

HOST_ARCH = "${BUILD_ARCH}"
HOST_OS = "${BUILD_OS}"
HOST_VENDOR = "${BUILD_VENDOR}"
HOST_PREFIX = "${BUILD_PREFIX}"
HOST_CC_ARCH = "${BUILD_CC_ARCH}"

CPPFLAGS = "${BUILD_CPPFLAGS}"
CFLAGS = "${BUILD_CFLAGS}"
CXXFLAGS = "${BUILD_CFLAGS}"
LDFLAGS = "${BUILD_LDFLAGS}"

# Path prefixes — everything installs into the staging area.
base_prefix = "${exec_prefix}"
prefix = "${STAGING_DIR}"
exec_prefix = "${STAGING_DIR}/${BUILD_ARCH}-${BUILD_OS}"

# Base paths
base_bindir = "${base_prefix}/bin"
base_sbindir = "${base_prefix}/bin"
base_libdir = "${base_prefix}/lib"

# Architecture independent paths
sysconfdir = "${prefix}/etc"
sharedstatedir = "${prefix}/com"
localstatedir = "${prefix}/var"
infodir = "${datadir}/info"
mandir = "${datadir}/man"
docdir = "${datadir}/doc"
servicedir = "${prefix}/srv"

# Architecture dependent paths
bindir = "${exec_prefix}/bin"
sbindir = "${exec_prefix}/bin"
libexecdir = "${exec_prefix}/libexec"
libdir = "${exec_prefix}/lib"
includedir = "${exec_prefix}/include"
oldincludedir = "${exec_prefix}/include"

# Datadir is made arch dependent here, primarily
# for autoconf macros, and other things that
# may be manipulated to handle crosscompilation
# issues.
datadir = "${exec_prefix}/share"
# Staging for native packages is a plain 'make install' (prefix already
# points into STAGING_DIR), unless the recipe inhibits it.
do_stage () {
	if [ "${INHIBIT_NATIVE_STAGE_INSTALL}" != "1" ]
	then
		oe_runmake install
	fi
}
# Nothing to install into ${D}: native output lives in staging only.
do_install () {
	true
}

View File

@ -0,0 +1,18 @@
# Extra arguments to pass through to slugimage, overridable by the image recipe.
NSLU2_SLUGIMAGE_ARGS ?= ""

# Assemble an NSLU2 flash image: stage the Linksys boot blobs, the kernel
# and the jffs2 rootfs in a scratch dir, run slugimage, then clean up.
nslu2_pack_image () {
	install -d ${DEPLOY_DIR_IMAGE}/slug
	install -m 0644 ${STAGING_LIBDIR}/nslu2-binaries/RedBoot \
		${STAGING_LIBDIR}/nslu2-binaries/Trailer \
		${STAGING_LIBDIR}/nslu2-binaries/SysConf \
		${DEPLOY_DIR_IMAGE}/slug/
	install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-${IMAGE_BASENAME} ${DEPLOY_DIR_IMAGE}/slug/vmlinuz
	install -m 0644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.jffs2 ${DEPLOY_DIR_IMAGE}/slug/flashdisk.jffs2
	cd ${DEPLOY_DIR_IMAGE}/slug
	slugimage -p -b RedBoot -s SysConf -r Ramdisk:1,Flashdisk:flashdisk.jffs2 -t Trailer \
		-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.flashdisk.img ${NSLU2_SLUGIMAGE_ARGS}
	rm -rf ${DEPLOY_DIR_IMAGE}/slug
}

EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware'
IMAGE_POSTPROCESS_COMMAND += "nslu2_pack_image; "

View File

@ -0,0 +1,4 @@
# Fall back to the nslu2-linux source mirror for any ftp/http fetch.
MIRRORS_append () {
ftp://.*/.*/      http://sources.nslu2-linux.org/sources/
http://.*/.*/     http://sources.nslu2-linux.org/sources/
}

View File

@ -0,0 +1,18 @@
# Extra arguments to pass through to slugimage, overridable by the image recipe.
NSLU2_SLUGIMAGE_ARGS ?= ""

# Assemble an NSLU2 ramdisk image (ext2.gz rootfs variant of the
# flashdisk packer above).
nslu2_pack_image () {
	install -d ${DEPLOY_DIR_IMAGE}/slug
	install -m 0644 ${STAGING_LIBDIR}/nslu2-binaries/RedBoot \
		${STAGING_LIBDIR}/nslu2-binaries/Trailer \
		${STAGING_LIBDIR}/nslu2-binaries/SysConf \
		${DEPLOY_DIR_IMAGE}/slug/
	install -m 0644 ${DEPLOY_DIR_IMAGE}/zImage-${IMAGE_BASENAME} ${DEPLOY_DIR_IMAGE}/slug/vmlinuz
	install -m 0644 ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ext2.gz ${DEPLOY_DIR_IMAGE}/slug/ramdisk.ext2.gz
	cd ${DEPLOY_DIR_IMAGE}/slug
	slugimage -p -b RedBoot -s SysConf -r Ramdisk:ramdisk.ext2.gz -t Trailer \
		-o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.ramdisk.img ${NSLU2_SLUGIMAGE_ARGS}
	rm -rf ${DEPLOY_DIR_IMAGE}/slug
}

EXTRA_IMAGEDEPENDS += 'slugimage-native nslu2-linksys-firmware'
IMAGE_POSTPROCESS_COMMAND += "nslu2_pack_image; "

View File

@ -0,0 +1,6 @@
# Fall back to the meshcube.org nylon mirrors (stable first, then unstable).
MIRRORS_append () {
ftp://.*/.*/      http://meshcube.org/nylon/stable/sources/
http://.*/.*/     http://meshcube.org/nylon/stable/sources/
ftp://.*/.*/      http://meshcube.org/nylon/unstable/sources/
http://.*/.*/     http://meshcube.org/nylon/unstable/sources/
}

View File

@ -0,0 +1,16 @@
#
# Because base.oeclass sets the variables
#
# do_fetch[nostamp] = "1"
# do_build[nostamp] = "1"
#
# bitbake re-runs all of the phases on every build to check whether there is
# something new to download. This class unsets that nostamp flag, which
# marks a package as "finished" once it has been completely built.
#
# This means that subsequent builds are faster, but when you change the
# behaviour of the package, e.g. by adding INHERIT="package_ipk", you won't
# get the ipk file unless you delete the build stamp manually, or all of
# them with oebuild clean <oe-file>.
do_build[nostamp] = ""

View File

@ -0,0 +1,174 @@
# Run the lint checks before anything is fetched; nostamp so they run on
# every invocation rather than once per stamp.
addtask lint before do_fetch
do_lint[nostamp] = 1
python do_lint() {
    # Sanity-check recipe metadata: DESCRIPTION, HOMEPAGE, LICENSE,
    # MAINTAINER, SECTION and PRIORITY. Problems are reported through
    # bb.error (hard problems) and bb.note (style suggestions).

    def testVar(var, explain=None):
        # Return the raw content of 'var' from the datastore, or None
        # (after reporting an error) when the variable is not set.
        try:
            s = d[var]
            return s["content"]
        except KeyError:
            bb.error("%s is not set" % var)
            if explain: bb.note(explain)
            return None

    ##############################
    # Test that DESCRIPTION exists
    #
    testVar("DESCRIPTION")

    ##############################
    # Test that HOMEPAGE exists
    #
    s = testVar("HOMEPAGE")
    if s=="unknown":
        bb.error("HOMEPAGE is not set")
    elif s and not s.startswith("http://"):
        # 's and' guards the unset case: testVar already reported it, and
        # calling startswith on None would crash the lint task itself.
        bb.error("HOMEPAGE doesn't start with http://")

    ##############################
    # Test for valid LICENSE
    #
    # Values map to: True  -> name is fine as-is
    #                False -> not recommended, no suggestion
    #                string-> not recommended, use the suggested name
    valid_licenses = {
        "GPL-2"     : "GPLv2",
        "GPL LGPL FDL" : True,
        "GPL PSF"   : True,
        "GPL/QPL"   : True,
        "GPL"       : True,
        "GPLv2"     : True,
        "IBM"       : True,
        "LGPL GPL"  : True,
        "LGPL"      : True,
        "MIT"       : True,
        "OSL"       : True,
        "Perl"      : True,
        "Public Domain" : True,
        "QPL"       : "GPL/QPL",
    }
    s = testVar("LICENSE")
    if s=="unknown":
        bb.error("LICENSE is not set")
    elif s and s.startswith("Vendor"):
        pass
    elif s:
        try:
            newlic = valid_licenses[s]
            if newlic == False:
                bb.note("LICENSE '%s' is not recommended" % s)
            elif newlic != True:
                # Bug fix: this previously interpolated 'newsect' (a name
                # from the SECTION check below), raising a NameError.
                bb.note("LICENSE '%s' is not recommended, better use '%s'" % (s, newlic))
        except KeyError:
            bb.note("LICENSE '%s' is not recommended" % s)

    ##############################
    # Test for valid MAINTAINER
    #
    s = testVar("MAINTAINER")
    if s=="OpenEmbedded Team <oe@handhelds.org>":
        bb.error("explicit MAINTAINER is missing, using default")
    elif s and s.find("@") == -1:
        bb.error("You forgot to put an e-mail address into MAINTAINER")

    ##############################
    # Test for valid SECTION
    #
    # if Correct section: True   section name is valid
    #                     False  section name is invalid, no suggestion
    #                     string section name is invalid, better name suggested
    #
    valid_sections = {
        # Current Section        Correct section
        "apps"                  : True,
        "audio"                 : True,
        "base"                  : True,
        "console/games"         : True,
        "console/net"           : "console/network",
        "console/network"       : True,
        "console/utils"         : True,
        "devel"                 : True,
        "developing"            : "devel",
        "devel/python"          : True,
        "fonts"                 : True,
        "games"                 : True,
        "games/libs"            : True,
        "gnome/base"            : True,
        "gnome/libs"            : True,
        "gpe"                   : True,
        "gpe/libs"              : True,
        "gui"                   : False,
        "libc"                  : "libs",
        "libs"                  : True,
        "libs/net"              : True,
        "multimedia"            : True,
        "net"                   : "network",
        "NET"                   : "network",
        "network"               : True,
        "opie/applets"          : True,
        "opie/applications"     : True,
        "opie/base"             : True,
        "opie/codecs"           : True,
        "opie/decorations"      : True,
        "opie/fontfactories"    : True,
        "opie/fonts"            : True,
        "opie/games"            : True,
        "opie/help"             : True,
        "opie/inputmethods"     : True,
        "opie/libs"             : True,
        "opie/multimedia"       : True,
        "opie/pim"              : True,
        "opie/setting"          : "opie/settings",
        "opie/settings"         : True,
        "opie/Shell"            : False,
        "opie/styles"           : True,
        "opie/today"            : True,
        "scientific"            : True,
        "utils"                 : True,
        "x11"                   : True,
        "x11/libs"              : True,
        "x11/wm"                : True,
    }
    s = testVar("SECTION")
    if s:
        try:
            newsect = valid_sections[s]
            if newsect == False:
                bb.note("SECTION '%s' is not recommended" % s)
            elif newsect != True:
                bb.note("SECTION '%s' is not recommended, better use '%s'" % (s, newsect))
        except KeyError:
            # Narrowed from a bare 'except': only the failed dict lookup
            # is expected here.
            bb.note("SECTION '%s' is not recommended" % s)

        if not s.islower():
            bb.error("SECTION should only use lower case")

    ##############################
    # Test for valid PRIORITY
    #
    valid_priorities = {
        "standard"  : True,
        "required"  : True,
        "optional"  : True,
        "extra"     : True,
    }
    s = testVar("PRIORITY")
    if s:
        try:
            newprio = valid_priorities[s]
            if newprio == False:
                bb.note("PRIORITY '%s' is not recommended" % s)
            elif newprio != True:
                bb.note("PRIORITY '%s' is not recommended, better use '%s'" % (s, newprio))
        except KeyError:
            bb.note("PRIORITY '%s' is not recommended" % s)

        if not s.islower():
            bb.error("PRIORITY should only use lower case")
}

View File

@ -0,0 +1,99 @@
#
# This oeclass takes care about some of the itchy details of installing parts
# of Opie applications. Depending on quicklaunch or not, plugin or not, the
# TARGET is either a shared object, a shared object with a link to quicklauncher,
# or a usual binary.
#
# You have to provide two things: 1.) A proper SECTION field, and 2.) a proper APPNAME
# Then opie.oeclass will:
# * create the directory for the binary and install the binary file(s)
# * for applications: create the directory for the .desktop and install the .desktop file
# * for quicklauncher applications: create the startup symlink to the quicklauncher
# You can override the automatic detection of APPTYPE, valid values are 'quicklaunch', 'binary', 'plugin'
# You can override the default location of APPDESKTOP (<workdir>/apps/<section>/)
#
inherit palmtop

# Prepend libopie2 to DEPENDS for every opie recipe except libopie2 itself.
DEPENDS_prepend = "${@["libopie2 ", ""][(bb.data.getVar('PN', d, 1) == 'libopie2')]}"

# to be consistent, put all targets into workdir
EXTRA_QMAKEVARS_POST_append = " DESTDIR=${S}"

# Opie standard TAG value, e.g. PV 1.2.0 -> v1_2_0
TAG = "${@'v' + bb.data.getVar('PV',d,1).replace('.', '_')}"

# plan for later:
# add common scopes for opie applications, see qmake-native/common.pro
# qmake should care about all the details then. qmake can do that, i know it :)
#
# Install an Opie application/plugin according to its SECTION and APPTYPE:
# binaries and quicklaunch .so's go into the mapped bindir, .desktop files
# into the mapped desktop dir, and quicklaunch apps get a symlink to the
# quicklauncher binary.
python opie_do_opie_install() {
    import os, shutil
    # NOTE(review): assumes SECTION always contains a '/' (e.g. "opie/games");
    # a single-component SECTION would raise IndexError here — confirm callers.
    section = bb.data.getVar( "SECTION", d ).split( '/' )[1] or "Applications"
    section = section.title()
    if section in ( "Base", "Libs" ):
        bb.note( "Section = Base or Libs. Target won't be installed automatically." )
        return

    # SECTION         : BINDIR                      DESKTOPDIR
    dirmap = { "Applets"       : ( "/plugins/applets",       None ),
               "Applications"  : ( "<BINDIR>",               "/apps/Applications" ),
               "Multimedia"    : ( "<BINDIR>",               "/apps/Applications" ),
               "Games"         : ( "<BINDIR>",               "/apps/Games" ),
               "Settings"      : ( "<BINDIR>",               "/apps/Settings" ),
               "Pim"           : ( "<BINDIR>",               "/apps/1Pim" ),
               "Examples"      : ( "<BINDIR>",               "/apps/Examples" ),
               "Shell"         : ( "/bin",                   "/apps/Opie-SH" ),
               "Codecs"        : ( "/plugins/codecs",        None ),
               "Decorations"   : ( "/plugins/decorations",   None ),
               "Inputmethods"  : ( "/plugins/inputmethods",  None ),
               "Fontfactories" : ( "/plugins/fontfactories", None ),
               "Security"      : ( "/plugins/security",      None ),
               "Styles"        : ( "/plugins/styles",        None ),
               "Today"         : ( "/plugins/today",         None ),
               "Datebook"      : ( "/plugins/holidays",      None ),
               "Networksettings" : ( "/plugins/networksettings", None ) }

    if section not in dirmap:
        raise ValueError, "Unknown section '%s'. Valid sections are: %s" % ( section, dirmap.keys() )

    bindir, desktopdir = dirmap[section]
    APPNAME = bb.data.getVar( "APPNAME", d, True ) or bb.data.getVar( "PN", d, True )
    APPTYPE = bb.data.getVar( "APPTYPE", d, True )
    if not APPTYPE:
        # Applications default to quicklaunch; everything else is a plugin.
        if bindir == "<BINDIR>":
            APPTYPE = "quicklaunch"
        else:
            APPTYPE = "plugin"

    appmap = { "binary":"/bin", "quicklaunch":"/plugins/application" }
    if bindir == "<BINDIR>": bindir = appmap[APPTYPE]

    bb.note( "Section='%s', bindir='%s', desktopdir='%s', name='%s', type='%s'" %
             ( section, bindir, desktopdir, APPNAME, APPTYPE ) )

    S = bb.data.getVar( "S", d, 1 )
    D = "%s/image" % bb.data.getVar( "WORKDIR", d, True )
    WORKDIR = bb.data.getVar( "WORKDIR", d, True )
    palmtopdir = bb.data.getVar( "palmtopdir", d )
    APPDESKTOP = bb.data.getVar( "APPDESKTOP", d, True ) or "%s/%s" % ( WORKDIR, desktopdir )

    if desktopdir is not None:
        os.system( "install -d %s%s%s/" % ( D, palmtopdir, desktopdir ) )
        os.system( "install -m 0644 %s/%s.desktop %s%s%s/" % ( APPDESKTOP, APPNAME, D, palmtopdir, desktopdir ) )

    os.system( "install -d %s%s%s/" % ( D, palmtopdir, bindir ) )

    if APPTYPE == "binary":
        os.system( "install -m 0755 %s/%s %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
    elif APPTYPE == "quicklaunch":
        os.system( "install -m 0755 %s/lib%s.so %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
        os.system( "install -d %s%s/bin/" % ( D, palmtopdir ) )
        os.system( "ln -sf %s/bin/quicklauncher %s%s/bin/%s" % ( palmtopdir, D, palmtopdir, APPNAME ) )
    elif APPTYPE == "plugin":
        os.system( "install -m 0755 %s/lib%s.so %s%s%s/" % ( S, APPNAME, D, palmtopdir, bindir ) )
}

EXPORT_FUNCTIONS do_opie_install
addtask opie_install after do_compile before do_populate_staging

View File

@ -0,0 +1,163 @@
# classes/opie_i18n.oeclass Matthias 'CoreDump' Hentges 16-10-2004
#
# Automatically builds i18n ipks for opie packages. It downloads opie-i18n from opie CVS
# and tries to guess the name of the .ts file based on the package name:
# ${PN}.ts, lib${PN}.ts and opie-${PN}.ts are all valid. The .ts "guessing" can be
# disabled by setting I18N_FILES in the .oe file.
#
# Todo:
#
# When "1", log i18n hit/miss statistics to /tmp/oe-i18n*.log (debug aid).
I18N_STATS = "1"
# Pull the Opie translations module from CVS alongside the package sources.
SRC_URI += "${HANDHELDS_CVS};module=opie/i18n"
DEPENDS += "opie-i18n"
# Print an error message and abort the task.
die () {
	echo -e "opie_18n: ERROR: $1"
	exit 1
}
# Read the PACKAGES.tmp / FILES.tmp files written by the shell task
# do_build_opie_i18n (shell cannot modify OE variables directly) and turn
# them into per-language sub-packages: PACKAGES, FILES_*, SECTION_* and
# RDEPENDS_* entries. FILES.tmp lines have the form "<pkg>#<path>".
python do_build_opie_i18n_data() {
    import os, bb, re
    workdir  = bb.data.getVar("WORKDIR", d, 1)
    packages = bb.data.getVar("PACKAGES", d, 1)
    files    = bb.data.getVar("FILES", d, 1)
    section  = bb.data.getVar("SECTION", d, 1)
    pn       = bb.data.getVar("PN", d, 1)
    rdepends = bb.data.getVar("RDEPENDS", d, 1)

    if os.path.exists(workdir + "/PACKAGES.tmp"):
        fd = open(workdir + "/PACKAGES.tmp", 'r')
        lines = fd.readlines()
        fd.close()

        # Language packages come first so their FILES_ globs win.
        bb.data.setVar('PACKAGES', " ".join(lines).lower() + " " + packages, d)

        fd = open(workdir + "/FILES.tmp", 'r')
        lines = fd.readlines()
        fd.close()

        for l in lines:
            x = re.split("\#", l)
            bb.data.setVar('FILES_%s' % x[0].lower(), " " + x[1].strip('\n'), d)
            bb.data.setVar('SECTION_%s' % x[0].lower(), "opie/translations", d)
            bb.data.setVar('RDEPENDS_%s' % x[0].lower(), pn, d)

        bb.data.setVar('SECTION_%s' % pn, section, d)
        bb.data.setVar('RDEPENDS', rdepends, d)
    else:
        bb.note("No translations found for package " + pn)
}
# Compile Opie .ts translation sources into .qm catalogues, install them
# per-language under ${palmtopdir}/i18n/<lang>, and record the resulting
# sub-package names/paths in PACKAGES.tmp / FILES.tmp for the python task
# do_build_opie_i18n_data to pick up.
do_build_opie_i18n () {
	cd "${WORKDIR}/i18n" || die "ERROR:\nCouldn't find Opies i18n sources in ${PN}/i18n\nMake sure that <inherit opie_i18n> or <inherit opie> is *below* <SRC_URIS =>!"

	# Guess candidate .ts names from PN unless the recipe sets I18N_FILES.
	if test -z "${I18N_FILES}"
	then
		package_name="`echo "${PN}"| sed "s/^opie\-//"`"
		package_name2="`echo "${PN}"| sed "s/^opie\-//;s/\-//"`"
		test "$package_name" != "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts ${package_name2}.ts lib${package_name2}.ts opie-${package_name2}.ts"
		test "$package_name" = "$package_name2" && I18N_FILES="${package_name}.ts lib${package_name}.ts opie-${package_name}.ts"
		echo -e "I18N Datafiles: ${I18N_FILES} (auto-detected)\nYou can overide the auto-detection by setting I18N_FILES in your .oe file"
	else
		echo "I18N Datafiles: ${I18N_FILES} (provided by .bb)"
	fi

	rm -f "${WORKDIR}/FILES.tmp" "${WORKDIR}/PACKAGES.tmp"

	echo -e "\nFILES is set to [${FILES}]\n"

	for file in ${I18N_FILES}
	do
		echo "Working on [$file]"
		# Each language directory may carry a copy of the .ts file.
		for ts_file in `ls -1 */*.ts | egrep "/$file"`
		do
			echo -e "\tCompiling [$ts_file]"
			cd "${WORKDIR}/i18n/`dirname $ts_file`" || die "[${WORKDIR}/i18n/`dirname $ts_file`] not found"
			opie-lrelease "`basename $ts_file`" || die "lrelease failed! Make sure that <inherit opie_i18n> or <inherit opie> is *below* <DEPENDS =>!"

			# $lang is the language as in de_DE, $lang_sane replaces "_" with "-"
			# to allow packaging as "_" is not allowed in a package name
			lang="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"`"
			lang_sane="`echo "$ts_file" | sed -n "s#\(.*\)/\(.*\)#\1#p"|sed s/\_/\-/`"
			echo -e "\tPackaging [`basename $ts_file`] for language [$lang]"

			install -d ${D}${palmtopdir}/i18n/$lang
			install -m 0644 ${WORKDIR}/i18n/$lang/.directory ${D}${palmtopdir}/i18n/$lang/
			install -m 0644 ${WORKDIR}/i18n/$lang/*.qm "${D}${palmtopdir}/i18n/$lang/"

			# As it is not possible to modify OE vars from within a _shell_ function,
			# some major hacking was needed. These two files will be read by the python
			# function do_build_opie_i18n_data() which sets the variables FILES_* and
			# PACKAGES as needed.
			echo -n "${PN}-${lang_sane} " >> "${WORKDIR}/PACKAGES.tmp"
			echo -e "${PN}-${lang_sane}#${palmtopdir}/i18n/$lang" >> "${WORKDIR}/FILES.tmp"

			ts_found_something=1
		done

		if test "$ts_found_something" != 1
		then
			echo -e "\tNo translations found"
		else
			ts_found_something=""
			ts_found="$ts_found $file"
		fi

		# Only used for debugging purposes
		test "${I18N_STATS}" = 1 && cd "${WORKDIR}/i18n"

		echo -e "Completed [$file]\n\n"
	done

	qt_dirs="apps bin etc lib pics plugins share sounds"

	for dir in $qt_dirs
	do
		dir_="$dir_ ${palmtopdir}/$dir "
	done

	# If we don't adjust FILES to exclude the i18n directory, we will end up with
	# _lots_ of empty i18n/$lang directories in the original .ipk.
	if (echo "${FILES}" | egrep "${palmtopdir}/? |${palmtopdir}/?$") &>/dev/null
	then
		echo "NOTE: FILES was set to ${palmtopdir} which would include the i18n directory"
		echo -e "\n\nI'll remove ${palmtopdir} from FILES and replace it with all directories"
		echo "below QtPalmtop, except i18n ($qt_dirs). See classes/opie_i18n.oeclass for details"

		# Removes /opt/QtPalmtop from FILES but keeps /opt/QtPalmtop/$some_dir
		FILES="`echo "$FILES"| sed "s#${palmtopdir}[/]\?\$\|${palmtopdir}[/]\? ##"`"

		echo "${PN}#$FILES $dir_" >> "${WORKDIR}/FILES.tmp"
	fi

	# This is the common case for OPIE apps which are installed by opie.oeclass magic
	if test -z "${FILES}"
	then
		echo "NOTE:"
		echo -e "Since FILES is empty, i'll add all directories below ${palmtopdir} to it,\nexcluding i18n: ( $qt_dirs )"
		echo "${PN}#$FILES $dir_" >> "${WORKDIR}/FILES.tmp"
	fi

	if ! test -e "${WORKDIR}/PACKAGES.tmp" -a "${I18N_STATS}" = 1
	then
		echo "No translations for package [${PN}]" >> /tmp/oe-i18n-missing.log
	else
		echo "Using [$ts_found ] for package [${PN}]" >> /tmp/oe-i18n.log
	fi

	# While this might not be very elegant, it safes a _ton_ of space (~30Mb) for
	# each opie package.
	for file in $(ls */*.ts | egrep -v "`echo "$ts_found"| sed "s/^\ //;s/\ /\|/"`")
	do
		rm "$file"
	done

	return 0
}

addtask build_opie_i18n before do_compile
addtask build_opie_i18n_data after do_build_opie_i18n before do_compile

View File

@ -0,0 +1,596 @@
def legitimize_package_name(s):
    """Map an arbitrary string onto a valid package name.

    Lowercases the input and substitutes the characters that are not
    permitted in package names: '_' and '/' become '-', while '@' and
    ',' become '+'.
    """
    substitutions = (('_', '-'), ('@', '+'), (',', '+'), ('/', '-'))
    name = s.lower()
    for bad, good in substitutions:
        name = name.replace(bad, good)
    return name
def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False):
    """Auto-split files under 'root' into sub-packages.

    Every file (optionally directory) under D+root whose name (or path,
    with match_path) matches file_regex becomes a package named
    output_pattern % group(1). For new packages FILES_/DESCRIPTION_/
    RDEPENDS_/pkg_postinst_/pkg_postrm_ are set; for pre-existing ones the
    file is appended to FILES_. Finally PACKAGES is rewritten.
    """
    import os, os.path, bb

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined")
        return

    packages = bb.data.getVar('PACKAGES', d, 1).split()
    if not packages:
        # nothing to do
        return

    # Wrap raw postinst/postrm fragments into complete shell scripts.
    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'

    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    # Default: each split package runtime-depends on the main package.
    if extra_depends == None:
        extra_depends = bb.data.getVar('PKG_' + packages[0], d, 1) or packages[0]

    for o in objs:
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))
        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        if not pkg in packages:
            # New package: register it (front of the list with prepend so
            # its FILES_ globs take precedence) and fill in its metadata.
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
            the_files = [os.path.join(root, o)]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
            if extra_depends != '':
                the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
                if the_depends:
                    the_depends = '%s %s' % (the_depends, extra_depends)
                else:
                    the_depends = extra_depends
                bb.data.setVar('RDEPENDS_' + pkg, the_depends, d)
            bb.data.setVar('DESCRIPTION_' + pkg, description % on, d)
            if postinst:
                bb.data.setVar('pkg_postinst_' + pkg, postinst, d)
            if postrm:
                bb.data.setVar('pkg_postrm_' + pkg, postrm, d)
        else:
            # Existing package: just extend its file list.
            oldfiles = bb.data.getVar('FILES_' + pkg, d, 1)
            if not oldfiles:
                bb.fatal("Package '%s' exists but has no files" % pkg)
            bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    bb.data.setVar('PACKAGES', ' '.join(packages), d)
# Distribute the files installed in ${D} into per-package trees under
# WORKDIR/install/<pkg> based on each package's FILES_ list, strip
# executables, warn about unshipped files, turn dangling symlinks into
# RDEPENDS on the package providing the target, and write the
# <PN>.package metadata file consumed by read_subpackage_metadata.
python populate_packages () {
    import glob, stat, errno, re

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined, unable to package")
        return

    import os # path manipulations

    outdir = bb.data.getVar('DEPLOY_DIR', d, 1)
    if not outdir:
        bb.error("DEPLOY_DIR not defined, unable to package")
        return
    bb.mkdirhier(outdir)

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined, unable to package")
        return
    bb.mkdirhier(dvar)

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "PACKAGES not defined, nothing to package")
        return

    pn = bb.data.getVar('PN', d, 1)
    if not pn:
        bb.error("PN not defined")
        return

    os.chdir(dvar)

    def isexec(path):
        # True when 'path' is a regular file with any execute bit set.
        try:
            s = os.stat(path)
        except (os.error, AttributeError):
            return 0
        return (s[stat.ST_MODE] & stat.S_IEXEC)

    for pkg in packages.split():
        # Evaluate FILES in a copy of the datastore with the package name
        # appended to OVERRIDES, so FILES_<pkg> overrides apply.
        localdata = bb.data.createCopy(d)
        root = os.path.join(workdir, "install", pkg)

        os.system('rm -rf %s' % root)

        bb.data.setVar('ROOT', '', localdata)
        bb.data.setVar('ROOT_%s' % pkg, root, localdata)
        pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
        if not pkgname:
            pkgname = pkg
        bb.data.setVar('PKG', pkgname, localdata)
        overrides = bb.data.getVar('OVERRIDES', localdata, 1)
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')
        bb.data.setVar('OVERRIDES', overrides+':'+pkg, localdata)
        bb.data.update_data(localdata)

        root = bb.data.getVar('ROOT', localdata, 1)
        bb.mkdirhier(root)
        filesvar = bb.data.getVar('FILES', localdata, 1) or ""
        files = filesvar.split()
        stripfunc = ""
        for file in files:
            if os.path.isabs(file):
                file = '.' + file
            if not os.path.islink(file):
                # Expand directories and globs in-place; the expanded
                # entries are appended and processed by later iterations.
                if os.path.isdir(file):
                    newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
                    if newfiles:
                        files += newfiles
                        continue
                globbed = glob.glob(file)
                if globbed:
                    if [ file ] != globbed:
                        files += globbed
                        continue
            if (not os.path.islink(file)) and (not os.path.exists(file)):
                continue
            fpath = os.path.join(root,file)
            dpath = os.path.dirname(fpath)
            bb.mkdirhier(dpath)
            # Queue executables for stripping after they are moved.
            if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1) != '1') and not os.path.islink(file) and isexec(file):
                stripfunc += "${STRIP} %s || : ;\n" % fpath
            ret = bb.movefile(file,fpath)
            if ret is None or ret == 0:
                raise bb.build.FuncFailed("File population failed")
        if not stripfunc == "":
            from bb import build
            # strip
            bb.data.setVar('RUNSTRIP', '%s\nreturn 0' % stripfunc, localdata)
            bb.data.setVarFlag('RUNSTRIP', 'func', 1, localdata)
            bb.build.exec_func('RUNSTRIP', localdata)
        del localdata
    os.chdir(workdir)

    # Anything still left under ${D} was not claimed by any FILES_ list.
    unshipped = []
    for root, dirs, files in os.walk(dvar):
        for f in files:
            path = os.path.join(root[len(dvar):], f)
            unshipped.append(path)

    if unshipped != []:
        bb.note("the following files were installed but not shipped in any package:")
        for f in unshipped:
            bb.note(" " + f)

    bb.build.exec_func("package_name_hook", d)

    for pkg in packages.split():
        if bb.data.getVar('PKG_%s' % pkg, d, 1) is None:
            bb.data.setVar('PKG_%s' % pkg, pkg, d)

    # Collect every packaged path and every dangling symlink target.
    dangling_links = {}
    pkg_files = {}
    for pkg in packages.split():
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(inst_root):
            for f in files:
                path = os.path.join(root, f)
                rpath = path[len(inst_root):]
                pkg_files[pkg].append(rpath)
                try:
                    s = os.stat(path)
                except OSError, (err, strerror):
                    if err != errno.ENOENT:
                        raise
                    # ENOENT on stat means a dangling symlink; resolve its
                    # target relative to the package root.
                    target = os.readlink(path)
                    if target[0] != '/':
                        target = os.path.join(root[len(inst_root):], target)
                    dangling_links[pkg].append(os.path.normpath(target))

    # Turn cross-package symlink targets into runtime dependencies.
    for pkg in packages.split():
        rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "")
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages.split():
                for f in pkg_files[p]:
                    if f == l:
                        found = True
                        bb.debug(1, "target found in %s" % p)
                        if p == pkg:
                            break
                        dp = bb.data.getVar('PKG_' + p, d, 1) or p
                        if not dp in rdepends:
                            rdepends.append(dp)
                        break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))
        bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)

    def write_if_exists(f, pkg, var):
        # Write "VAR_pkg: value" with backslash-escaped content, only when set.
        def encode(str):
            import codecs
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = bb.data.getVar('%s_%s' % (var, pkg), d, 1)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))

    data_file = os.path.join(workdir, "install", pn + ".package")
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    for pkg in packages.split():
        write_if_exists(f, pkg, 'DESCRIPTION')
        write_if_exists(f, pkg, 'RDEPENDS')
        write_if_exists(f, pkg, 'RPROVIDES')
        write_if_exists(f, pkg, 'PKG')
        write_if_exists(f, pkg, 'ALLOW_EMPTY')
        write_if_exists(f, pkg, 'FILES')
        write_if_exists(f, pkg, 'pkg_postinst')
        write_if_exists(f, pkg, 'pkg_postrm')
    f.close()
    bb.build.exec_func("read_subpackage_metadata", d)
}
# Postinst fragment appended to library packages: refresh the linker
# cache, but only when running on the target ($D empty), not at rootfs time.
ldconfig_postinst_fragment() {
	if [ x"$D" = "x" ]; then
		ldconfig
	fi
}
# Scan each package's binaries with objdump: record provided SONAMEs into
# per-package .list/.ver files in the shared shlibs directory, add an
# ldconfig postinst for packages installing into */lib, then resolve each
# NEEDED entry against all known providers and write <pkg>.shlibdeps.
python package_do_shlibs() {
    import os, re, os.path

    exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile("^lib.*\.so")
    libdir_re = re.compile(".*/lib$")

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "no packages to build; not calculating shlibs")
        return

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined")
        return

    staging = bb.data.getVar('STAGING_DIR', d, 1)
    if not staging:
        bb.error("STAGING_DIR not defined")
        return

    ver = bb.data.getVar('PV', d, 1)
    if not ver:
        bb.error("PV not defined")
        return

    target_sys = bb.data.getVar('TARGET_SYS', d, 1)
    if not target_sys:
        bb.error("TARGET_SYS not defined")
        return

    shlibs_dir = os.path.join(staging, target_sys, "shlibs")
    old_shlibs_dir = os.path.join(staging, "shlibs")
    bb.mkdirhier(shlibs_dir)

    needed = {}
    for pkg in packages.split():
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
        if not pkgname:
            pkgname = pkg

        needed[pkg] = []
        sonames = list()
        top = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(top):
            for file in files:
                soname = None
                path = os.path.join(root, file)
                # Candidates: anything executable, or anything named lib*.so*.
                if os.access(path, os.X_OK) or lib_re.match(file):
                    cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + path + " 2>/dev/null"
                    fd = os.popen(cmd)
                    lines = fd.readlines()
                    fd.close()
                    for l in lines:
                        m = re.match("\s+NEEDED\s+([^\s]*)", l)
                        if m:
                            needed[pkg].append(m.group(1))
                        m = re.match("\s+SONAME\s+([^\s]*)", l)
                        if m and not m.group(1) in sonames:
                            sonames.append(m.group(1))
                        if m and libdir_re.match(root):
                            needs_ldconfig = True
        shlibs_file = os.path.join(shlibs_dir, pkgname + ".list")
        if os.path.exists(shlibs_file):
            os.remove(shlibs_file)
        shver_file = os.path.join(shlibs_dir, pkgname + ".ver")
        if os.path.exists(shver_file):
            os.remove(shver_file)
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                fd.write(s + '\n')
            fd.close()
            fd = open(shver_file, 'w')
            fd.write(ver + '\n')
            fd.close()
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += bb.data.getVar('ldconfig_postinst_fragment', d, 1)
            bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

    # Build the soname -> (providing package, version) map from every
    # .list file staged so far (old flat layout checked first).
    shlib_provider = {}
    list_re = re.compile('^(.*)\.list$')
    for dir in [old_shlibs_dir, shlibs_dir]:
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = list_re.match(file)
            if m:
                dep_pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                ver_file = os.path.join(dir, dep_pkg + '.ver')
                lib_ver = None
                if os.path.exists(ver_file):
                    fd = open(ver_file)
                    lib_ver = fd.readline().rstrip()
                    fd.close()
                for l in lines:
                    shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)

    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        p_pkg = bb.data.getVar("PKG_%s" % pkg, d, 1) or pkg

        deps = list()
        for n in needed[pkg]:
            if n in shlib_provider.keys():
                (dep_pkg, ver_needed) = shlib_provider[n]

                # Don't depend on ourselves for our own libraries.
                if dep_pkg == p_pkg:
                    continue

                if ver_needed:
                    dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                else:
                    dep = dep_pkg
                if not dep in deps:
                    deps.append(dep)
            else:
                bb.note("Couldn't find shared library provider for %s" % n)

        deps_file = os.path.join(workdir, "install", pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
# Parse every .pc file in each package: record the modules a package
# provides into <pkg>.pclist in the shared shlibs directory, expand each
# file's Requires: lines, then map required modules back to providing
# packages and write <pkg>.pcdeps.
python package_do_pkgconfig () {
    import re, os

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "no packages to build; not calculating pkgconfig dependencies")
        return

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined")
        return

    staging = bb.data.getVar('STAGING_DIR', d, 1)
    if not staging:
        bb.error("STAGING_DIR not defined")
        return

    target_sys = bb.data.getVar('TARGET_SYS', d, 1)
    if not target_sys:
        bb.error("TARGET_SYS not defined")
        return

    shlibs_dir = os.path.join(staging, target_sys, "shlibs")
    old_shlibs_dir = os.path.join(staging, "shlibs")
    bb.mkdirhier(shlibs_dir)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        top = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(top):
            for file in files:
                m = pc_re.match(file)
                if m:
                    # Use a scratch datastore to expand ${var} references
                    # inside the .pc file with its own variable definitions.
                    pd = bb.data.init()
                    name = m.group(1)
                    pkgconfig_provided[pkg].append(name)
                    path = os.path.join(root, file)
                    if not os.access(path, os.R_OK):
                        continue
                    f = open(path, 'r')
                    lines = f.readlines()
                    f.close()
                    for l in lines:
                        m = var_re.match(l)
                        if m:
                            name = m.group(1)
                            val = m.group(2)
                            bb.data.setVar(name, bb.data.expand(val, pd), pd)
                            continue
                        m = field_re.match(l)
                        if m:
                            hdr = m.group(1)
                            exp = bb.data.expand(m.group(2), pd)
                            if hdr == 'Requires':
                                pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    for pkg in packages.split():
        ppkg = bb.data.getVar("PKG_" + pkg, d, 1) or pkg
        pkgs_file = os.path.join(shlibs_dir, ppkg + ".pclist")
        if os.path.exists(pkgs_file):
            os.remove(pkgs_file)
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    # Merge in providers recorded by previously-built packages.
    for dir in [old_shlibs_dir, shlibs_dir]:
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(workdir, "install", pkg + ".pcdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
# Split ${datadir}/locale/<lang> trees into per-language packages named
# <PN>-locale-<lang>, wire up their FILES/RDEPENDS/RPROVIDES/DESCRIPTION,
# and make the main package recommend them via an RDEPENDS glob.
python package_do_split_locales() {
    import os

    if (bb.data.getVar('PACKAGE_NO_LOCALE', d, 1) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
    if not packages:
        bb.debug(1, "no packages to build; not splitting locales")
        return

    datadir = bb.data.getVar('datadir', d, 1)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined")
        return

    pn = bb.data.getVar('PN', d, 1)
    if not pn:
        bb.error("PN not defined")
        return

    # Drop any catch-all locale package; per-language ones replace it.
    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not os.path.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    mainpkg = packages[0]

    for l in locales:
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d)
        bb.data.setVar('RDEPENDS_' + pkg, '${PKG_%s} virtual-locale-%s' % (mainpkg, ln), d)
        bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d)
        bb.data.setVar('DESCRIPTION_' + pkg, '%s translation for %s' % (l, pn), d)

    bb.data.setVar('PACKAGES', ' '.join(packages), d)

    rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split()
    rdep.append('%s-locale*' % pn)
    bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
}
# Ordered list of steps run by package_do_package; packaging backend
# classes (package_ipk, package_rpm, ...) append their own functions.
PACKAGEFUNCS = "do_install package_do_split_locales \
		populate_packages package_do_shlibs \
		package_do_pkgconfig read_shlibdeps"
python package_do_package () {
	# Run every registered packaging step, in the order listed in
	# PACKAGEFUNCS.
	funcs = bb.data.getVar('PACKAGEFUNCS', d, 1) or ''
	for funcname in funcs.split():
		bb.build.exec_func(funcname, d)
}
# Packaging runs from inside the destination tree.
do_package[dirs] = "${D}"
populate_packages[dirs] = "${D}"
EXPORT_FUNCTIONS do_package do_shlibs do_split_locales
addtask package before do_build after do_populate_staging

View File

@ -0,0 +1,231 @@
inherit package
# Only pull in ipkg-utils-native when there is actually anything to
# package (PACKAGES non-empty).
DEPENDS_prepend="${@["ipkg-utils-native ", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"
BOOTSTRAP_EXTRA_RDEPENDS += "ipkg-collateral ipkg ipkg-link"
BOOTSTRAP_EXTRA_DEPENDS += "ipkg-collateral ipkg ipkg-link"
PACKAGEFUNCS += "do_package_ipk"
python package_ipk_fn () {
	# For the ipk backend the package file name is simply the package name.
	from bb import data
	pkg = bb.data.getVar('PKG', d)
	bb.data.setVar('PKGFN', pkg, d)
}
python package_ipk_install () {
	# Install a single ipk (PKGFN) into IMAGE_ROOTFS with ipkg-cl,
	# generating the staging ipkg.conf and the Packages index on demand.
	import os, sys
	pkg = bb.data.getVar('PKG', d, 1)
	pkgfn = bb.data.getVar('PKGFN', d, 1)
	rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
	ipkdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
	stagingdir = bb.data.getVar('STAGING_DIR', d, 1)
	tmpdir = bb.data.getVar('TMPDIR', d, 1)
	if None in (pkg,pkgfn,rootfs):
		raise bb.build.FuncFailed("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
	try:
		bb.mkdirhier(rootfs)
		os.chdir(rootfs)
	except OSError:
		(type, value, traceback) = sys.exc_info()
		print value
		raise bb.build.FuncFailed
	# Generate ipk.conf if it or the stamp doesnt exist
	conffile = os.path.join(stagingdir,"ipkg.conf")
	if not os.access(conffile, os.R_OK):
		ipkg_archs = bb.data.getVar('IPKG_ARCHS',d)
		if ipkg_archs is None:
			bb.error("IPKG_ARCHS missing")
			# Bug fix: FuncFailed lives in bb.build; the bare name here
			# raised a NameError instead of the intended build failure.
			raise bb.build.FuncFailed
		ipkg_archs = ipkg_archs.split()
		# Earlier (more generic) arches get lower priority numbers.
		arch_priority = 1
		f = open(conffile,"w")
		for arch in ipkg_archs:
			f.write("arch %s %s\n" % ( arch, arch_priority ))
			arch_priority += 1
		f.write("src local file:%s" % ipkdir)
		f.close()
	# Rebuild the feed index when it is missing or the do_packages stamp
	# has been invalidated.
	if (not os.access(os.path.join(ipkdir,"Packages"), os.R_OK) or
		not os.access(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),os.R_OK)):
		ret = os.system('ipkg-make-index -p %s %s ' % (os.path.join(ipkdir, "Packages"), ipkdir))
		if (ret != 0 ):
			raise bb.build.FuncFailed
		f=open(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),"w")
		f.close()
	# NOTE(review): the update return code is deliberately ignored; only
	# the install result fails the task.
	ret = os.system('ipkg-cl -o %s -f %s update' % (rootfs, conffile))
	ret = os.system('ipkg-cl -o %s -f %s install %s' % (rootfs, conffile, pkgfn))
	if (ret != 0 ):
		raise bb.build.FuncFailed
}
python do_package_ipk () {
	# Build one .ipk in DEPLOY_DIR_IPK for every entry in PACKAGES:
	# writes CONTROL/control plus maintainer scripts into each package's
	# install tree, then invokes IPKGBUILDCMD on it.
	import copy # to back up env data
	import sys
	import re
	workdir = bb.data.getVar('WORKDIR', d, 1)
	if not workdir:
		bb.error("WORKDIR not defined, unable to package")
		return
	import os # path manipulations
	outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
	if not outdir:
		bb.error("DEPLOY_DIR_IPK not defined, unable to package")
		return
	bb.mkdirhier(outdir)
	dvar = bb.data.getVar('D', d, 1)
	if not dvar:
		bb.error("D not defined, unable to package")
		return
	bb.mkdirhier(dvar)
	packages = bb.data.getVar('PACKAGES', d, 1)
	if not packages:
		bb.debug(1, "PACKAGES not defined, nothing to package")
		return
	tmpdir = bb.data.getVar('TMPDIR', d, 1)
	# Invalidate the packages file
	if os.access(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"),os.R_OK):
		os.unlink(os.path.join(os.path.join(tmpdir, "stamps"),"do_packages"))
	if packages == []:
		bb.debug(1, "No packages; nothing to do")
		return
	for pkg in packages.split():
		# Work on a copy of the datastore with OVERRIDES extended by the
		# package name, so per-package variable overrides take effect.
		localdata = bb.data.createCopy(d)
		root = "%s/install/%s" % (workdir, pkg)
		bb.data.setVar('ROOT', '', localdata)
		bb.data.setVar('ROOT_%s' % pkg, root, localdata)
		pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
		if not pkgname:
			pkgname = pkg
		bb.data.setVar('PKG', pkgname, localdata)
		overrides = bb.data.getVar('OVERRIDES', localdata)
		if not overrides:
			raise bb.build.FuncFailed('OVERRIDES not defined')
		overrides = bb.data.expand(overrides, localdata)
		bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata)
		bb.data.update_data(localdata)
		basedir = os.path.join(os.path.dirname(root))
		pkgoutdir = outdir
		bb.mkdirhier(pkgoutdir)
		os.chdir(root)
		from glob import glob
		# CONTROL metadata does not count as package content.
		g = glob('*')
		try:
			del g[g.index('CONTROL')]
			del g[g.index('./CONTROL')]
		except ValueError:
			pass
		if not g and not bb.data.getVar('ALLOW_EMPTY', localdata):
			from bb import note
			note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
			continue
		controldir = os.path.join(root, 'CONTROL')
		bb.mkdirhier(controldir)
		try:
			ctrlfile = file(os.path.join(controldir, 'control'), 'w')
		except OSError:
			raise bb.build.FuncFailed("unable to open control file for writing.")
		# (format-string, [variables]) pairs; all listed variables are
		# mandatory and checked below.
		fields = []
		fields.append(["Version: %s-%s\n", ['PV', 'PR']])
		fields.append(["Description: %s\n", ['DESCRIPTION']])
		fields.append(["Section: %s\n", ['SECTION']])
		fields.append(["Priority: %s\n", ['PRIORITY']])
		fields.append(["Maintainer: %s\n", ['MAINTAINER']])
		fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']])
		fields.append(["OE: %s\n", ['P']])
		# Fetch the values for one field's variable list.
		def pullData(l, d):
			l2 = []
			for i in l:
				l2.append(bb.data.getVar(i, d, 1))
			return l2
		ctrlfile.write("Package: %s\n" % pkgname)
		# check for required fields
		try:
			for (c, fs) in fields:
				for f in fs:
					if bb.data.getVar(f, localdata) is None:
						raise KeyError(f)
				ctrlfile.write(c % tuple(pullData(fs, localdata)))
		except KeyError:
			(type, value, traceback) = sys.exc_info()
			ctrlfile.close()
			raise bb.build.FuncFailed("Missing field for ipk generation: %s" % value)
		# more fields
		rdepends = explode_deps(bb.data.getVar("RDEPENDS", localdata, 1) or "")
		rrecommends = explode_deps(bb.data.getVar("RRECOMMENDS", localdata, 1) or "")
		rsuggests = (bb.data.getVar("RSUGGESTS", localdata, 1) or "").split()
		rprovides = (bb.data.getVar("RPROVIDES", localdata, 1) or "").split()
		rreplaces = (bb.data.getVar("RREPLACES", localdata, 1) or "").split()
		rconflicts = (bb.data.getVar("RCONFLICTS", localdata, 1) or "").split()
		if rdepends:
			ctrlfile.write("Depends: %s\n" % ", ".join(rdepends))
		if rsuggests:
			ctrlfile.write("Suggests: %s\n" % ", ".join(rsuggests))
		if rrecommends:
			ctrlfile.write("Recommends: %s\n" % ", ".join(rrecommends))
		if rprovides:
			ctrlfile.write("Provides: %s\n" % ", ".join(rprovides))
		if rreplaces:
			ctrlfile.write("Replaces: %s\n" % ", ".join(rreplaces))
		if rconflicts:
			ctrlfile.write("Conflicts: %s\n" % ", ".join(rconflicts))
		src_uri = bb.data.getVar("SRC_URI", localdata, 1)
		if src_uri:
			src_uri = re.sub("\s+", " ", src_uri)
			ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
		ctrlfile.close()
		# Emit any maintainer scripts the recipe defines, executable.
		for script in ["preinst", "postinst", "prerm", "postrm"]:
			scriptvar = bb.data.getVar('pkg_%s' % script, localdata, 1)
			if not scriptvar:
				continue
			try:
				scriptfile = file(os.path.join(controldir, script), 'w')
			except OSError:
				raise bb.build.FuncFailed("unable to open %s script file for writing." % script)
			scriptfile.write(scriptvar)
			scriptfile.close()
			os.chmod(os.path.join(controldir, script), 0755)
		conffiles_str = bb.data.getVar("CONFFILES", localdata, 1)
		if conffiles_str:
			try:
				conffiles = file(os.path.join(controldir, 'conffiles'), 'w')
			except OSError:
				raise bb.build.FuncFailed("unable to open conffiles for writing.")
			for f in conffiles_str.split():
				conffiles.write('%s\n' % f)
			conffiles.close()
		os.chdir(basedir)
		ret = os.system("PATH=\"%s\" %s %s %s" % (bb.data.getVar("PATH", localdata, 1),
			bb.data.getVar("IPKGBUILDCMD",d,1), pkg, pkgoutdir))
		if ret != 0:
			raise bb.build.FuncFailed("ipkg-build execution failed")
		# Clean the CONTROL dir back out of the install tree.
		for script in ["preinst", "postinst", "prerm", "postrm", "control" ]:
			scriptfile = os.path.join(controldir, script)
			try:
				os.remove(scriptfile)
			except OSError:
				pass
		try:
			os.rmdir(controldir)
		except OSError:
			pass
		del localdata
}

View File

@ -0,0 +1,133 @@
inherit package
inherit rpm_core
RPMBUILD="rpmbuild --short-circuit ${RPMOPTS}"
PACKAGEFUNCS += "do_package_rpm"
python write_specfile() {
	# Generate a minimal .spec from the current package's metadata, run
	# rpmbuild on it, and move the resulting rpm into DEPLOY_DIR_RPM.
	from bb import data, build
	import sys
	# Metadata-variable -> spec-field map; entries starting with '%' are
	# emitted as sections rather than header tags.
	out_vartranslate = {
		"PKG": "Name",
		"PV": "Version",
		"PR": "Release",
		"DESCRIPTION": "%description",
		"ROOT": "BuildRoot",
		"LICENSE": "License",
		"SECTION": "Group",
	}
	root = bb.data.getVar('ROOT', d)
	# get %files
	filesvar = bb.data.expand(bb.data.getVar('FILES', d), d) or ""
	from glob import glob
	files = filesvar.split()
	todelete = []
	# Normalise leading-dot paths and drop FILES entries that match
	# nothing in the install root.
	for file in files:
		if file[0] == '.':
			newfile = file[1:]
			files[files.index(file)] = newfile
			file = newfile
		else:
			newfile = file
		realfile = os.path.join(root, './'+file)
		if not glob(realfile):
			todelete.append(files[files.index(newfile)])
	for r in todelete:
		try:
			del files[files.index(r)]
		except ValueError:
			pass
	if not files:
		from bb import note
		note("Not creating empty archive for %s-%s-%s" % (bb.data.getVar('PKG',d, 1), bb.data.getVar('PV', d, 1), bb.data.getVar('PR', d, 1)))
		return
	# output .spec using this metadata store
	try:
		from __builtin__ import file
		if not bb.data.getVar('OUTSPECFILE', d):
			raise OSError('eek!')
		specfile = file(bb.data.getVar('OUTSPECFILE', d), 'w')
	except OSError:
		raise bb.build.FuncFailed("unable to open spec file for writing.")
	# fd = sys.__stdout__
	fd = specfile
	# Header tags first, then %-sections, then the %files list.
	for var in out_vartranslate.keys():
		if out_vartranslate[var][0] == "%":
			continue
		fd.write("%s\t: %s\n" % (out_vartranslate[var], bb.data.getVar(var, d)))
	fd.write("Summary\t: .\n")
	for var in out_vartranslate.keys():
		if out_vartranslate[var][0] != "%":
			continue
		fd.write(out_vartranslate[var] + "\n")
		fd.write(bb.data.getVar(var, d) + "\n\n")
	fd.write("%files\n")
	for file in files:
		fd.write("%s\n" % file)
	fd.close()
	# call out rpm -bb on the .spec, thereby creating an rpm
	bb.data.setVar('BUILDSPEC', "${RPMBUILD} -bb ${OUTSPECFILE}\n", d)
	bb.data.setVarFlag('BUILDSPEC', 'func', '1', d)
	bb.build.exec_func('BUILDSPEC', d)
	# move the rpm into the pkgoutdir
	rpm = bb.data.expand('${RPMBUILDPATH}/RPMS/${TARGET_ARCH}/${PKG}-${PV}-${PR}.${TARGET_ARCH}.rpm', d)
	outrpm = bb.data.expand('${DEPLOY_DIR_RPM}/${PKG}-${PV}-${PR}.${TARGET_ARCH}.rpm', d)
	bb.movefile(rpm, outrpm)
}
python do_package_rpm () {
	# Produce one rpm per entry in PACKAGES by writing a spec file per
	# package and delegating to write_specfile.
	workdir = bb.data.getVar('WORKDIR', d)
	if not workdir:
		raise bb.build.FuncFailed("WORKDIR not defined")
	workdir = bb.data.expand(workdir, d)
	import os # path manipulations
	outdir = bb.data.getVar('DEPLOY_DIR_RPM', d)
	if not outdir:
		raise bb.build.FuncFailed("DEPLOY_DIR_RPM not defined")
	outdir = bb.data.expand(outdir, d)
	bb.mkdirhier(outdir)
	packages = bb.data.getVar('PACKAGES', d)
	# Fall back to a single PN package containing everything in D.
	if not packages:
		packages = "${PN}"
		bb.data.setVar('FILES', '', d)
		ddir = bb.data.expand(bb.data.getVar('D', d), d)
		bb.mkdirhier(ddir)
		bb.data.setVar(bb.data.expand('FILES_${PN}', d), ''.join([ "./%s" % x for x in os.listdir(ddir)]), d)
	packages = bb.data.expand(packages, d)
	for pkg in packages.split():
		# Per-package datastore copy with OVERRIDES extended by the
		# package name.
		localdata = bb.data.createCopy(d)
		root = "%s/install/%s" % (workdir, pkg)
		bb.data.setVar('ROOT', '', localdata)
		bb.data.setVar('ROOT_%s' % pkg, root, localdata)
		bb.data.setVar('PKG', pkg, localdata)
		overrides = bb.data.getVar('OVERRIDES', localdata)
		if not overrides:
			raise bb.build.FuncFailed('OVERRIDES not defined')
		overrides = bb.data.expand(overrides, localdata)
		bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)
		bb.data.update_data(localdata)
		# stuff
		root = bb.data.getVar('ROOT', localdata)
		basedir = os.path.dirname(root)
		pkgoutdir = outdir
		bb.mkdirhier(pkgoutdir)
		bb.data.setVar('OUTSPECFILE', os.path.join(workdir, "%s.spec" % pkg), localdata)
		bb.build.exec_func('write_specfile', localdata)
		del localdata
}

View File

@ -0,0 +1,99 @@
inherit package
PACKAGEFUNCS += "do_package_tar"
python package_tar_fn () {
	# PKGFN for the tar backend: DEPLOY_DIR_TAR/<pkg>-<version>-<rev>.tar.gz
	import os
	from bb import data
	deploydir = bb.data.getVar('DEPLOY_DIR_TAR', d)
	tarname = "%s-%s-%s.tar.gz" % (bb.data.getVar('PKG', d), bb.data.getVar('PV', d), bb.data.getVar('PR', d))
	fn = bb.data.expand(os.path.join(deploydir, tarname), d)
	bb.data.setVar('PKGFN', fn, d)
}
python package_tar_install () {
	# Unpack the tarball produced by do_package_tar into IMAGE_ROOTFS.
	import os, sys
	pkg = bb.data.getVar('PKG', d, 1)
	pkgfn = bb.data.getVar('PKGFN', d, 1)
	rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
	if None in (pkg,pkgfn,rootfs):
		bb.error("missing variables (one or more of PKG, PKGFN, IMAGEROOTFS)")
		raise bb.build.FuncFailed
	try:
		bb.mkdirhier(rootfs)
		os.chdir(rootfs)
	except OSError:
		(type, value, traceback) = sys.exc_info()
		print value
		raise bb.build.FuncFailed
	if not os.access(pkgfn, os.R_OK):
		bb.debug(1, "%s does not exist, skipping" % pkgfn)
		raise bb.build.FuncFailed
	# Extract relative to the rootfs we just chdir'd into.
	ret = os.system('zcat %s | tar -xf -' % pkgfn)
	if ret != 0:
		raise bb.build.FuncFailed
}
python do_package_tar () {
	# Archive each package's install tree as a gzipped tarball in
	# DEPLOY_DIR_TAR.
	workdir = bb.data.getVar('WORKDIR', d, 1)
	if not workdir:
		bb.error("WORKDIR not defined, unable to package")
		return
	import os # path manipulations
	outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1)
	if not outdir:
		bb.error("DEPLOY_DIR_TAR not defined, unable to package")
		return
	bb.mkdirhier(outdir)
	dvar = bb.data.getVar('D', d, 1)
	if not dvar:
		bb.error("D not defined, unable to package")
		return
	bb.mkdirhier(dvar)
	packages = bb.data.getVar('PACKAGES', d, 1)
	if not packages:
		bb.debug(1, "PACKAGES not defined, nothing to package")
		return
	for pkg in packages.split():
		# Per-package datastore copy with OVERRIDES extended by the
		# package name.
		localdata = bb.data.createCopy(d)
		root = "%s/install/%s" % (workdir, pkg)
		bb.data.setVar('ROOT', '', localdata)
		bb.data.setVar('ROOT_%s' % pkg, root, localdata)
		bb.data.setVar('PKG', pkg, localdata)
		overrides = bb.data.getVar('OVERRIDES', localdata)
		if not overrides:
			raise bb.build.FuncFailed('OVERRIDES not defined')
		overrides = bb.data.expand(overrides, localdata)
		bb.data.setVar('OVERRIDES', '%s:%s' % (overrides, pkg), localdata)
		bb.data.update_data(localdata)
		# stuff
		root = bb.data.getVar('ROOT', localdata)
		bb.mkdirhier(root)
		basedir = os.path.dirname(root)
		pkgoutdir = outdir
		bb.mkdirhier(pkgoutdir)
		bb.build.exec_func('package_tar_fn', localdata)
		tarfn = bb.data.getVar('PKGFN', localdata, 1)
#		if os.path.exists(tarfn):
#			del localdata
#			continue
		os.chdir(root)
		from glob import glob
		if not glob('*'):
			bb.note("Not creating empty archive for %s-%s-%s" % (pkg, bb.data.getVar('PV', localdata, 1), bb.data.getVar('PR', localdata, 1)))
			continue
		ret = os.system("tar -czvf %s %s" % (tarfn, '.'))
		if ret != 0:
			bb.error("Creation of tar %s failed." % tarfn)
		# end stuff
		del localdata
}

View File

@ -0,0 +1,10 @@
# basically a placeholder for something more fancy
# for now, just declare some things
inherit qmake
EXTRA_QMAKEVARS_POST_append = " DEFINES+=QWS LIBS+=-lqpe CONFIG+=qt LIBS-=-lstdc++ LIBS+=-lsupc++"
DEPENDS_prepend = "virtual/libqpe uicmoc-native "
FILES_${PN} = "${palmtopdir}"

View File

@ -0,0 +1,7 @@
# Now that BitBake/OpenEmbedded uses Quilt by default, you can simply add an
# inherit patcher
# to one of your config files to let BB/OE use patcher again.
PATCHCLEANCMD = "patcher -B"
PATCHCMD = "patcher -R -p '%s' -n '%s' -i '%s'"
PATCH_DEPENDS = "${@["patcher-native", ""][(bb.data.getVar('PN', d, 1) == 'patcher-native')]}"

View File

@ -0,0 +1,29 @@
PKG_DISTRIBUTECOMMAND[func] = "1"
python do_distribute_packages () {
	# Delegate to the shell function PKG_DISTRIBUTECOMMAND; fail early
	# when it is unset so the task does not silently do nothing.
	if not bb.data.getVar('PKG_DISTRIBUTECOMMAND', d, 1):
		raise bb.build.FuncFailed("Unable to distribute packages, PKG_DISTRIBUTECOMMAND not defined")
	bb.build.exec_func('PKG_DISTRIBUTECOMMAND', d)
}
addtask distribute_packages before do_build after do_fetch
PKG_DIST_LOCAL ?= "symlink"
PKG_DISTRIBUTEDIR ?= "${DEPLOY_DIR}/packages"
PKG_DISTRIBUTECOMMAND () {
	# Publish the directory containing the current recipe file into
	# PKG_DISTRIBUTEDIR, either as a symlink (default) or a full copy.
	p=`dirname ${FILE}`
	d=`basename $p`
	mkdir -p ${PKG_DISTRIBUTEDIR}
	case "${PKG_DIST_LOCAL}" in
		copy)
			# use this weird tar command to copy because we want to
			# exclude the BitKeeper directories
			test -e ${PKG_DISTRIBUTEDIR}/${d} || mkdir ${PKG_DISTRIBUTEDIR}/${d};
			(cd ${p}; tar -c --exclude SCCS -f - . ) | tar -C ${PKG_DISTRIBUTEDIR}/${d} -xpf -
			;;
		symlink)
			ln -sf $p ${PKG_DISTRIBUTEDIR}/
			;;
	esac
}

View File

@ -0,0 +1,22 @@
python do_pkg_write_metainfo () {
	# Append a wiki-table row (name, version, description, homepage,
	# license) for this package to DEPLOY_DIR/package-metainfo.
	import os
	deploydir = bb.data.getVar('DEPLOY_DIR', d, 1)
	if not deploydir:
		bb.error("DEPLOY_DIR not defined, unable to write package info")
		return
	try:
		infofile = file(os.path.join(deploydir, 'package-metainfo'), 'a')
	except (IOError, OSError):
		# Bug fix: on Python 2 open()/file() raises IOError, which the
		# previous OSError-only clause never caught.
		raise bb.build.FuncFailed("unable to open package-info file for writing.")
	name = bb.data.getVar('PN', d, 1)
	version = bb.data.getVar('PV', d, 1)
	desc = bb.data.getVar('DESCRIPTION', d, 1)
	page = bb.data.getVar('HOMEPAGE', d, 1)
	lic = bb.data.getVar('LICENSE', d, 1)
	infofile.write("|| "+ name +" || "+ version + " || "+ desc +" || "+ page +" || "+ lic + " ||\n" )
	infofile.close()
}
addtask pkg_write_metainfo after do_package before do_build

View File

@ -0,0 +1,28 @@
inherit base
DEPENDS_prepend = "pkgconfig-native "
# The namespaces can clash here hence the two step replace
def get_pkgconfig_mangle(d):
	"""Return sed arguments that rewrite target install paths in .pc
	files to their staging-area equivalents (no-op for native recipes)."""
	import bb.data
	s = "-e ''"
	if not bb.data.inherits_class('native', d):
		# Two-step replace: first map each path onto a neutral
		# placeholder token, then map the tokens to staging paths, so
		# one substitution cannot clobber the result of another.
		s += " -e 's:=${libdir}:=OELIBDIR:;'"
		s += " -e 's:=${includedir}:=OEINCDIR:;'"
		s += " -e 's:=${datadir}:=OEDATADIR:'"
		s += " -e 's:=${prefix}:=OEPREFIX:'"
		s += " -e 's:=${exec_prefix}:=OEEXECPREFIX:'"
		s += " -e 's:OELIBDIR:${STAGING_LIBDIR}:;'"
		s += " -e 's:OEINCDIR:${STAGING_INCDIR}:;'"
		s += " -e 's:OEDATADIR:${STAGING_DATADIR}:'"
		s += " -e 's:OEPREFIX:${STAGING_LIBDIR}/..:'"
		s += " -e 's:OEEXECPREFIX:${STAGING_LIBDIR}/..:'"
	return s
do_stage_append () {
	# Copy every generated .pc file into PKG_CONFIG_PATH, rewriting the
	# paths it advertises so dependent builds find staged headers/libs.
	for pc in `find ${S} -name '*.pc'`; do
		pcname=`basename $pc`
		install -d ${PKG_CONFIG_PATH}
		cat $pc | sed ${@get_pkgconfig_mangle(d)} > ${PKG_CONFIG_PATH}/$pcname
	done
}

View File

@ -0,0 +1,42 @@
DEPENDS_prepend = "qmake-native "
OE_QMAKE_PLATFORM = "${TARGET_OS}-oe-g++"
QMAKESPEC := "${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}"
# We override this completely to eliminate the -e normally passed in
EXTRA_OEMAKE = ' MAKEFLAGS= '
export OE_QMAKE_CC="${CC}"
export OE_QMAKE_CFLAGS="${CFLAGS}"
export OE_QMAKE_CXX="${CXX}"
export OE_QMAKE_CXXFLAGS="-fno-exceptions -fno-rtti ${CXXFLAGS}"
export OE_QMAKE_LDFLAGS="${LDFLAGS}"
export OE_QMAKE_LINK="${CCLD}"
export OE_QMAKE_AR="${AR}"
export OE_QMAKE_STRIP="${STRIP}"
export OE_QMAKE_UIC="${STAGING_BINDIR}/uic"
export OE_QMAKE_MOC="${STAGING_BINDIR}/moc"
export OE_QMAKE_RPATH="-Wl,-rpath-link,"
# default to qte2 via bb.conf, inherit qt3x11 to configure for qt3x11
export OE_QMAKE_INCDIR_QT="${QTDIR}/include"
export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib"
export OE_QMAKE_LIBS_QT="qte"
export OE_QMAKE_LIBS_X11=""
oe_qmake_mkspecs () {
	# Replicate the platform mkspec into the build tree, recreating
	# relative symlinks whose targets already exist there and copying
	# everything else.
	mkdir -p mkspecs/${OE_QMAKE_PLATFORM}
	for f in ${QMAKE_MKSPEC_PATH}/${OE_QMAKE_PLATFORM}/*; do
		if [ -L $f ]; then
			lnk=`readlink $f`
			if [ -f mkspecs/${OE_QMAKE_PLATFORM}/$lnk ]; then
				ln -s $lnk mkspecs/${OE_QMAKE_PLATFORM}/`basename $f`
				continue
			fi
		fi
		cp $f mkspecs/${OE_QMAKE_PLATFORM}/
	done
}

View File

@ -0,0 +1,57 @@
inherit qmake-base
qmake_do_configure() {
	# Pick a usable qmake spec for the target, collect the .pro files to
	# build, and run qmake with pre/post variable substitutions.
	case ${QMAKESPEC} in
	*linux-oe-g++|*linux-uclibc-oe-g++)
		;;
	*-oe-g++)
		die Unsupported target ${TARGET_OS} for oe-g++ qmake spec
		;;
	*)
		# No oe-g++ spec configured: probe known spec locations,
		# most-specific candidate first.
		oenote Searching for qmake spec file
		paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-${TARGET_ARCH}-g++"
		paths="${QMAKE_MKSPEC_PATH}/${TARGET_OS}-g++ $paths"
		if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
			paths="${QMAKE_MKSPEC_PATH}/qws/${TARGET_OS}-x86-g++ $paths"
		fi
		for i in $paths; do
			if test -e $i; then
				export QMAKESPEC=$i
				break
			fi
		done
		;;
	esac
	oenote "using qmake spec in ${QMAKESPEC}, using profiles '${QMAKE_PROFILES}'"
	if [ -z "${QMAKE_PROFILES}" ]; then
		PROFILES="`ls *.pro`"
	else
		PROFILES="${QMAKE_PROFILES}"
	fi
	if [ -z "$PROFILES" ]; then
		die "QMAKE_PROFILES not set and no profiles found in $PWD"
	fi
	# -after is only passed when there are post-substitution variables.
	if [ ! -z "${EXTRA_QMAKEVARS_POST}" ]; then
		AFTER="-after"
		QMAKE_VARSUBST_POST="${EXTRA_QMAKEVARS_POST}"
		oenote "qmake postvar substitution: ${EXTRA_QMAKEVARS_POST}"
	fi
	if [ ! -z "${EXTRA_QMAKEVARS_PRE}" ]; then
		QMAKE_VARSUBST_PRE="${EXTRA_QMAKEVARS_PRE}"
		oenote "qmake prevar substitution: ${EXTRA_QMAKEVARS_PRE}"
	fi
	#oenote "Calling 'qmake -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST'"
	# NOTE(review): ${QMAKESPEC} below appears to be expanded by bitbake
	# at parse time, so the shell-level unset would not blank the -spec
	# argument — confirm intent.
	unset QMAKESPEC || true
	qmake -makefile -spec ${QMAKESPEC} -o Makefile $QMAKE_VARSUBST_PRE $AFTER $PROFILES $QMAKE_VARSUBST_POST || die "Error calling qmake on $PROFILES"
}
EXPORT_FUNCTIONS do_configure
addtask configure after do_unpack do_patch before do_compile

View File

@ -0,0 +1,36 @@
PACKAGE_ARCH = "all"
# No configure or compile step: qpf recipes only package prebuilt fonts.
do_configure() {
	:
}
do_compile() {
	:
}
pkg_postinst_fonts() {
#!/bin/sh
set -e
. /etc/profile
${sbindir}/update-qtfontdir
}
pkg_postrm_fonts() {
#!/bin/sh
set -e
. /etc/profile
${sbindir}/update-qtfontdir -f
}
python populate_packages_prepend() {
	# Create one qpf-<fontname> package per .qpf file found under
	# palmtopdir/lib/fonts, attaching the font-dir update scripts.
	postinst = bb.data.getVar('pkg_postinst_fonts', d, 1)
	postrm = bb.data.getVar('pkg_postrm_fonts', d, 1)
	fontdir = bb.data.getVar('palmtopdir', d, 1) + '/lib/fonts'
	pkgregex = "^([a-z-]*_[0-9]*).*.qpf$"
	pkgpattern = bb.data.getVar('QPF_PKGPATTERN', d, 1) or 'qpf-%s'
	pkgdescription = bb.data.getVar('QPF_DESCRIPTION', d, 1) or 'QPF font %s'
	do_split_packages(d, root=fontdir, file_regex=pkgregex, output_pattern=pkgpattern,
		description=pkgdescription, postinst=postinst, postrm=postrm, recursive=True, hook=None,
		extra_depends='qpf-font-common')
}

View File

@ -0,0 +1,11 @@
#
# override variables set by qmake-base to compile Qt/X11 apps
#
export QTDIR="${STAGING_DIR}/${HOST_SYS}/qte3"
export QTEDIR="${STAGING_DIR}/${HOST_SYS}/qte3"
export OE_QMAKE_UIC="${STAGING_BINDIR}/uic3"
export OE_QMAKE_MOC="${STAGING_BINDIR}/moc3"
export OE_QMAKE_CXXFLAGS="${CXXFLAGS} "
export OE_QMAKE_INCDIR_QT="${QTEDIR}/include"
export OE_QMAKE_LIBDIR_QT="${QTEDIR}/lib"
export OE_QMAKE_LIBS_QT="qte"

View File

@ -0,0 +1,11 @@
#
# override variables set by qmake-base to compile Qt/X11 apps
#
export QTDIR="${STAGING_DIR}/${HOST_SYS}/qt3"
export OE_QMAKE_UIC="${STAGING_BINDIR}/uic3"
export OE_QMAKE_MOC="${STAGING_BINDIR}/moc3"
export OE_QMAKE_CXXFLAGS="${CXXFLAGS} -DQT_NO_XIM"
export OE_QMAKE_INCDIR_QT="${QTDIR}/include"
export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib"
export OE_QMAKE_LIBS_QT="qt"
export OE_QMAKE_LIBS_X11="-lXext -lX11 -lm"

View File

@ -0,0 +1,11 @@
#
# override variables set by qmake-base to compile Qt/X11 apps
#
export QTDIR="${STAGING_DIR}/${HOST_SYS}/qt4"
export OE_QMAKE_UIC="${STAGING_BINDIR}/uic4"
export OE_QMAKE_MOC="${STAGING_BINDIR}/moc4"
export OE_QMAKE_CXXFLAGS="${CXXFLAGS}"
export OE_QMAKE_INCDIR_QT="${QTDIR}/include"
export OE_QMAKE_LIBDIR_QT="${QTDIR}/lib"
export OE_QMAKE_LIBS_QT="qt"
export OE_QMAKE_LIBS_X11="-lXext -lX11 -lm"

View File

@ -0,0 +1,22 @@
#
# Removes source after build
#
# To use it add that line to conf/local.conf:
#
# INHERIT += "rm_work"
#
do_rm_work () {
	# Reclaim disk space after packaging: empty the source tree (but keep
	# the directory itself), keep temp/ (logs) intact, delete the rest.
	cd ${WORKDIR}
	for dir in *
	do
		# Bug fix: '==' inside [ ] is a bashism; POSIX test only
		# accepts '='.
		if [ `basename ${S}` = $dir ]; then
			rm -rf $dir/*
		elif [ $dir != 'temp' ]; then
			rm -rf $dir
		fi
	done
}
addtask rm_work before do_build
addtask rm_work after do_package

View File

@ -0,0 +1,136 @@
#
# Creates a root filesystem out of IPKs
#
# This rootfs can be mounted via root-nfs or it can be put into an cramfs/jffs etc.
# See image_ipk.oeclass for a usage of this.
#
DEPENDS_prepend="ipkg-native ipkg-utils-native fakeroot-native "
DEPENDS_append=" ${EXTRA_IMAGEDEPENDS}"
PACKAGES = ""
# Always rebuild the rootfs: no stamps for do_rootfs/do_build.
do_rootfs[nostamp] = 1
do_rootfs[dirs] = ${TOPDIR}
do_build[nostamp] = 1
IPKG_ARGS = "-f ${T}/ipkg.conf -o ${IMAGE_ROOTFS}"
ROOTFS_POSTPROCESS_COMMAND ?= ""
# Used by log_check to locate this run's per-task log files.
PID = "${@os.getpid()}"
# some default locales
IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
LINGUAS_INSTALL = "${@" ".join(map(lambda s: "locale-base-%s" % s, bb.data.getVar('IMAGE_LINGUAS', d, 1).split()))}"
real_do_rootfs () {
	# Assemble IMAGE_ROOTFS from the local ipk feed: index the feed,
	# write an ipkg.conf, install locales and IPKG_INSTALL, then run the
	# deferred maintainer scripts offline.
	set -x
	mkdir -p ${IMAGE_ROOTFS}/dev
	# Regenerate the feed index unless told to keep existing packages.
	if [ -z "${DEPLOY_KEEP_PACKAGES}" ]; then
		rm -f ${DEPLOY_DIR_IPK}/Packages
		touch ${DEPLOY_DIR_IPK}/Packages
		ipkg-make-index -r ${DEPLOY_DIR_IPK}/Packages -p ${DEPLOY_DIR_IPK}/Packages -l ${DEPLOY_DIR_IPK}/Packages.filelist -m ${DEPLOY_DIR_IPK}
	fi
	mkdir -p ${T}
	# Arch priorities grow towards the more specific arches at the end
	# of the list.
	echo "src oe file:${DEPLOY_DIR_IPK}" > ${T}/ipkg.conf
	ipkgarchs="all any noarch ${TARGET_ARCH} ${IPKG_ARCHS} ${MACHINE}"
	priority=1
	for arch in $ipkgarchs; do
		echo "arch $arch $priority" >> ${T}/ipkg.conf
		priority=$(expr $priority + 5)
	done
	ipkg-cl ${IPKG_ARGS} update
	if [ ! -z "${LINGUAS_INSTALL}" ]; then
		ipkg-cl ${IPKG_ARGS} install glibc-localedata-i18n
		for i in ${LINGUAS_INSTALL}; do
			ipkg-cl ${IPKG_ARGS} install $i
		done
	fi
	if [ ! -z "${IPKG_INSTALL}" ]; then
		ipkg-cl ${IPKG_ARGS} install ${IPKG_INSTALL}
	fi
	# Run the package maintainer scripts against the offline root; any
	# script that fails leaves its package flagged 'unpacked'.
	export D=${IMAGE_ROOTFS}
	export IPKG_OFFLINE_ROOT=${IMAGE_ROOTFS}
	mkdir -p ${IMAGE_ROOTFS}/etc/ipkg/
	grep "^arch" ${T}/ipkg.conf >${IMAGE_ROOTFS}/etc/ipkg/arch.conf
	for i in ${IMAGE_ROOTFS}${libdir}/ipkg/info/*.preinst; do
		if [ -f $i ] && ! sh $i; then
			ipkg-cl ${IPKG_ARGS} flag unpacked `basename $i .preinst`
		fi
	done
	for i in ${IMAGE_ROOTFS}${libdir}/ipkg/info/*.postinst; do
		if [ -f $i ] && ! sh $i configure; then
			ipkg-cl ${IPKG_ARGS} flag unpacked `basename $i .postinst`
		fi
	done
	install -d ${IMAGE_ROOTFS}/${sysconfdir}
	echo ${BUILDNAME} > ${IMAGE_ROOTFS}/${sysconfdir}/version
	${ROOTFS_POSTPROCESS_COMMAND}
	log_check rootfs
}
log_check() {
	# Scan the given tasks' log files for known error patterns and abort
	# the build if any are found.
	set +x
	for target in $*
	do
		lf_path="${WORKDIR}/temp/log.do_$target.${PID}"
		echo "log_check: Using $lf_path as logfile"
		if test -e "$lf_path"
		then
			lf_txt="`cat $lf_path`"
			for keyword_die in "Cannot find package" "exit 1" ERR Fail
			do
				# Bug fix: '&>' is a bashism — under a POSIX shell it
				# backgrounds the command instead of redirecting, so
				# output was never suppressed (nor waited for).
				if (echo "$lf_txt" | grep -v log_check | grep "$keyword_die") >/dev/null 2>&1
				then
					echo "log_check: There were error messages in the logfile"
					# Bug fix: 'echo -e' is unportable; printf emits the
					# same keyword line plus trailing blank line.
					printf "log_check: Matched keyword: [%s]\n\n" "$keyword_die"
					echo "$lf_txt" | grep -v log_check | grep -i "$keyword_die"
					echo ""
					do_exit=1
				fi
			done
			test "$do_exit" = 1 && exit 1
		else
			echo "Cannot find logfile [$lf_path]"
		fi
		echo "Logfile is clean"
	done
	set -x
}
fakeroot do_rootfs () {
	# Always start from a clean rootfs; fakeroot preserves the recorded
	# ownership/permission info while populating it.
	rm -rf ${IMAGE_ROOTFS}
	real_do_rootfs
}
# set '*' as the rootpassword so the images
# can decide if they want it or not
zap_root_password () {
	sed 's%^root:[^:]*:%root:*:%' < ${IMAGE_ROOTFS}/etc/passwd >${IMAGE_ROOTFS}/etc/passwd.new
	mv ${IMAGE_ROOTFS}/etc/passwd.new ${IMAGE_ROOTFS}/etc/passwd
}
create_etc_timestamp() {
	# Record the build time as MMDDhhmmYYYY.
	# NOTE(review): '%2m'-style padded fields look like a GNU date
	# extension — confirm the build-host date(1) supports them.
	date +%2m%2d%2H%2M%Y >${IMAGE_ROOTFS}/etc/timestamp
}
# export the zap_root_password and create_etc_timestamp
EXPORT_FUNCTIONS zap_root_password create_etc_timestamp
addtask rootfs before do_build after do_install

View File

@ -0,0 +1,16 @@
# rpmbuild scaffolding shared by the rpm packaging classes.
RPMBUILDPATH="${WORKDIR}/rpm"
RPMOPTS="--rcfile=${WORKDIR}/rpmrc"
# NOTE(review): this immediately overrides the assignment above; only
# the --target form takes effect.
RPMOPTS="--rcfile=${WORKDIR}/rpmrc --target ${TARGET_SYS}"
RPM="rpm ${RPMOPTS}"
RPMBUILD="rpmbuild --buildroot ${D} --short-circuit ${RPMOPTS}"
rpm_core_do_preprpm() {
	# Prepare the rpmbuild working tree and a private rpmrc/macros pair
	# pointing %_topdir at it.
	# Bug fix: {a,b} brace expansion is bash-only; under a POSIX shell
	# the old single mkdir created one literally-braced directory.
	for subdir in SPECS SRPMS SOURCES BUILD \
			RPMS/i386 RPMS/i586 RPMS/i686 RPMS/noarch \
			RPMS/ppc RPMS/mips RPMS/mipsel RPMS/arm; do
		mkdir -p ${RPMBUILDPATH}/$subdir
	done
	echo 'macrofiles:/usr/lib/rpm/macros:${WORKDIR}/macros' > ${WORKDIR}/rpmrc
	echo '%_topdir ${RPMBUILDPATH}' > ${WORKDIR}/macros
	echo '%_repackage_dir ${WORKDIR}' >> ${WORKDIR}/macros
}
EXPORT_FUNCTIONS do_preprpm
addtask preprpm before do_fetch

View File

@ -0,0 +1,13 @@
# Build and install with SCons from the staging area.
DEPENDS += "python-scons-native"
scons_do_compile() {
	${STAGING_BINDIR}/scons || \
	oefatal "scons build execution failed."
}
scons_do_install() {
	${STAGING_BINDIR}/scons install || \
	oefatal "scons install execution failed."
}
EXPORT_FUNCTIONS do_compile do_install

View File

@ -0,0 +1,22 @@
# SDK packages are built either explicitly by the user,
# or indirectly via dependency. No need to be in 'world'.
EXCLUDE_FROM_WORLD = "1"
SDK_NAME = "${TARGET_ARCH}/oe"
PACKAGE_ARCH = "${BUILD_ARCH}"
HOST_ARCH = "${BUILD_ARCH}"
HOST_VENDOR = "${BUILD_VENDOR}"
HOST_OS = "${BUILD_OS}"
HOST_PREFIX = "${BUILD_PREFIX}"
HOST_CC_ARCH = "${BUILD_CC_ARCH}"
export CPPFLAGS = "${BUILD_CPPFLAGS}"
export CFLAGS = "${BUILD_CFLAGS}"
export CXXFLAGS = "${BUILD_CFLAGS}"
export LDFLAGS = "${BUILD_LDFLAGS}"
prefix = "/usr/local/${SDK_NAME}"
exec_prefix = "${prefix}"
FILES_${PN} = "${prefix}"

View File

@ -0,0 +1,27 @@
FILES_${PN} += '${libdir}/perl5'
sdl_do_configure () {
	# Configure a perl-SDL style package; when cross-compiling, rewrite
	# the generated Makefile so installs land in the target perl5 tree.
	if [ -x ${S}/configure ] ; then
		cfgcmd="${S}/configure \
		-GL -GLU"
		oenote "Running $cfgcmd..."
		$cfgcmd || oefatal "oe_runconf failed"
		if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
			. ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
			sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new
			mv Makefile.new Makefile
		fi
	else
		oefatal "no configure script found"
	fi
}
sdl_do_compile () {
	# Pass CFLAGS through to the XS compilation step.
	oe_runmake PASTHRU_INC="${CFLAGS}"
}
sdl_do_install () {
	# Install into the vendor perl tree (paths fixed up at configure time).
	oe_runmake install_vendor
}
EXPORT_FUNCTIONS do_configure do_compile do_install

View File

@ -0,0 +1,53 @@
DEPENDS_prepend = "sip-native python-sip "
#EXTRA_SIPTAGS = "-tWS_QWS -tQtPE_1_6_0 -tQt_2_3_1"
sip_do_generate() {
	# Run sip over each *mod.sip module to generate C++ bindings, then
	# turn the emitted .sbf into a qmake .pro file.
	if [ -z "${SIP_MODULES}" ]; then
		MODULES="`ls sip/*mod.sip`"
	else
		MODULES="${SIP_MODULES}"
	fi
	if [ -z "$MODULES" ]; then
		die "SIP_MODULES not set and no modules found in $PWD"
	else
		oenote "using modules '${SIP_MODULES}' and tags '${EXTRA_SIPTAGS}'"
	fi
	if [ -z "${EXTRA_SIPTAGS}" ]; then
		die "EXTRA_SIPTAGS needs to be set!"
	else
		SIPTAGS="${EXTRA_SIPTAGS}"
	fi
	if [ ! -z "${SIP_FEATURES}" ]; then
		FEATURES="-z ${SIP_FEATURES}"
		oenote "sip feature file: ${SIP_FEATURES}"
	fi
	for module in $MODULES
	do
		install -d ${module}/
		# Bug fix: the log message previously described a different
		# command than the one executed (swapped -I order and a
		# '.pro.in' output instead of the actual '.sbf').
		oenote "calling 'sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf sip/${module}/${module}mod.sip'"
		sip -I ${STAGING_SIPDIR} -I sip ${SIPTAGS} ${FEATURES} -c ${module} -b ${module}/${module}.sbf sip/${module}/${module}mod.sip \
			|| die "Error calling sip on ${module}"
		# Translate the sip-generated .sbf into qmake .pro syntax.
		cat ${module}/${module}.sbf | sed s,target,TARGET, \
			| sed s,sources,SOURCES, \
			| sed s,headers,HEADERS, \
			| sed s,"moc_HEADERS =","HEADERS +=", \
			>${module}/${module}.pro
		echo "TEMPLATE=lib" >>${module}/${module}.pro
		[ "${module}" = "qt" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qtcanvas" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qttable" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qwt" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qtpe" ] && echo "" >>${module}/${module}.pro
		[ "${module}" = "qtpe" ] && echo "LIBS+=-lqpe" >>${module}/${module}.pro
		true
	done
}
EXPORT_FUNCTIONS do_generate
addtask generate after do_unpack do_patch before do_configure

View File

@ -0,0 +1,111 @@
DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/source"
EXCLUDE_FROM ?= ".pc"
# used as part of a path. make sure it's set
DISTRO ?= "openembedded"
def get_src_tree(d):
	"""Return the name (relative to WORKDIR) of the unpacked source tree.

	Derived from the first path component of S below WORKDIR; fatal if
	that directory does not actually exist.
	"""
	import bb
	import os, os.path
	workdir = bb.data.getVar('WORKDIR', d, 1)
	if not workdir:
		bb.error("WORKDIR not defined, unable to find source tree.")
		return
	s = bb.data.getVar('S', d, 0)
	if not s:
		bb.error("S not defined, unable to find source tree.")
		return
	s_tree_raw = s.split('/')[1]
	s_tree = bb.data.expand(s_tree_raw, d)
	src_tree_path = os.path.join(workdir, s_tree)
	try:
		os.listdir(src_tree_path)
	except OSError:
		bb.fatal("Expected to find source tree in '%s' which doesn't exist." % src_tree_path)
	# Bug fix: bb.debug takes the debug level as its first argument; the
	# old single-argument call passed the message as the level.
	bb.debug(1, "Assuming source tree is '%s'" % src_tree_path)
	return s_tree
sourcepkg_do_create_orig_tgz(){
	# Snapshot the pristine (pre-patch) source as ${P}.orig.tar.gz and
	# keep a .orig copy of the tree for the later diff.
	mkdir -p ${DEPLOY_DIR_SRC}
	cd ${WORKDIR}
	for i in ${EXCLUDE_FROM}; do
		echo $i >> temp/exclude-from-file
	done
	src_tree=${@get_src_tree(d)}
	echo $src_tree
	oenote "Creating .orig.tar.gz in ${DEPLOY_DIR_SRC}/${P}.orig.tar.gz"
	tar cvzf ${DEPLOY_DIR_SRC}/${P}.orig.tar.gz $src_tree --exclude-from temp/exclude-from-file
	cp -a $src_tree $src_tree.orig
}
sourcepkg_do_archive_bb() {
	# Ship the recipe file itself inside the source tree under ${DISTRO}/.
	src_tree=${@get_src_tree(d)}
	dest=${WORKDIR}/$src_tree/${DISTRO}
	mkdir -p $dest
	cp ${FILE} $dest
}
python sourcepkg_do_dumpdata() {
	# Dump the recipe's full variable/function environment into
	# <src_tree>/<distro>/${P}-${PR}.showdata.dump for distribution.
	import os
	import os.path
	workdir = bb.data.getVar('WORKDIR', d, 1)
	distro = bb.data.getVar('DISTRO', d, 1)
	s_tree = get_src_tree(d)
	openembeddeddir = os.path.join(workdir, s_tree, distro)
	dumpfile = os.path.join(openembeddeddir, bb.data.expand("${P}-${PR}.showdata.dump",d))
	try:
		os.mkdir(openembeddeddir)
	except OSError:
		# dir exists
		pass
	bb.note("Dumping metadata into '%s'" % dumpfile)
	f = open(dumpfile, "w")
	# emit variables and shell functions
	bb.data.emit_env(f, d, True)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'python', d):
			f.write("\npython %s () {\n%s}\n" % (e, bb.data.getVar(e, d, 1)))
	f.close()
}
sourcepkg_do_create_diff_gz(){
	# Produce ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz: the difference between
	# the pristine tree saved by do_create_orig_tgz ($src_tree.orig) and
	# the patched tree, with EXCLUDE_FROM patterns filtered out.
	cd ${WORKDIR}
	for i in ${EXCLUDE_FROM}; do
		echo $i >> temp/exclude-from-file
	done
	src_tree=${@get_src_tree(d)}
	# Stash loose top-level files (patches etc.) inside the tree so they
	# are captured by the diff as well.
	for i in `find . -maxdepth 1 -type f`; do
		mkdir -p $src_tree/${DISTRO}/files
		cp $i $src_tree/${DISTRO}/files
	done
	oenote "Creating .diff.gz in ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz"
	# LC_ALL/TZ pinned so diff output is reproducible.
	LC_ALL=C TZ=UTC0 diff --exclude-from=temp/exclude-from-file -Naur $src_tree.orig $src_tree | gzip -c > ${DEPLOY_DIR_SRC}/${P}-${PR}.diff.gz
	rm -rf $src_tree.orig
}
EXPORT_FUNCTIONS do_create_orig_tgz do_archive_bb do_dumpdata do_create_diff_gz

# Task ordering: the orig tarball is taken from the pristine unpacked tree,
# then the recipe and metadata dump are injected before the diff is created.
# Fix: task dependencies must use full task names — the original referenced
# "archive_bb" (missing do_ prefix) and the non-existent "do_dump_data"
# (the task added above is "dumpdata", i.e. do_dumpdata).
addtask create_orig_tgz after do_unpack before do_patch
addtask archive_bb after do_patch before do_dumpdata
addtask dumpdata after do_archive_bb before do_create_diff_gz
addtask create_diff_gz after do_dumpdata before do_configure

View File

@ -0,0 +1,40 @@
include conf/licenses.conf
SRC_DISTRIBUTECOMMAND[func] = "1"
python do_distribute_sources () {
l = bb.data.createCopy(d)
bb.data.update_data(l)
licenses = (bb.data.getVar('LICENSE', d, 1) or "").split()
if not licenses:
bb.note("LICENSE not defined")
src_distribute_licenses = (bb.data.getVar('SRC_DISTRIBUTE_LICENSES', d, 1) or "").split()
# Explanation:
# Space seperated items in LICENSE must *all* be distributable
# Each space seperated item may be used under any number of | seperated licenses.
# If any of those | seperated licenses are distributable, then that component is.
# i.e. LICENSE = "GPL LGPL"
# In this case, both components are distributable.
# LICENSE = "GPL|QPL|Proprietary"
# In this case, GPL is distributable, so the component is.
valid = 1
for l in licenses:
lvalid = 0
for i in l.split("|"):
if i in src_distribute_licenses:
lvalid = 1
if lvalid != 1:
valid = 0
if valid == 0:
bb.note("Licenses (%s) are not all listed in SRC_DISTRIBUTE_LICENSES, skipping source distribution" % licenses)
return
import re
for s in (bb.data.getVar('A', d, 1) or "").split():
s = re.sub(';.*$', '', s)
cmd = bb.data.getVar('SRC_DISTRIBUTECOMMAND', d, 1)
if not cmd:
raise bb.build.FuncFailed("Unable to distribute sources, SRC_DISTRIBUTECOMMAND not defined")
bb.data.setVar('SRC', s, d)
bb.build.exec_func('SRC_DISTRIBUTECOMMAND', d)
}
addtask distribute_sources before do_build after do_fetch

View File

@ -0,0 +1,31 @@
inherit src_distribute
# SRC_DIST_LOCAL possible values:
# copy copies the files from ${A} to the distributedir
# symlink symlinks the files from ${A} to the distributedir
# move+symlink moves the files into distributedir, and symlinks them back
SRC_DIST_LOCAL ?= "move+symlink"
SRC_DISTRIBUTEDIR ?= "${DEPLOY_DIR}/sources"
SRC_DISTRIBUTECOMMAND () {
	# Distribute one fetched source file (${SRC}) into SRC_DISTRIBUTEDIR.
	# Only acts on real files (not symlinks) located under ${DL_DIR};
	# anything else is silently skipped (exit 0 counts as success here).
	s="${SRC}"
	if [ ! -L "$s" ] && (echo "$s"|grep "^${DL_DIR}"); then
		:
	else
		exit 0;
	fi
	mkdir -p ${SRC_DISTRIBUTEDIR}
	case "${SRC_DIST_LOCAL}" in
		copy)
			# Carry the .md5 checksum file alongside when present.
			test -e $s.md5 && cp -f $s.md5 ${SRC_DISTRIBUTEDIR}/
			cp -f $s ${SRC_DISTRIBUTEDIR}/
			;;
		symlink)
			test -e $s.md5 && ln -sf $s.md5 ${SRC_DISTRIBUTEDIR}/
			ln -sf $s ${SRC_DISTRIBUTEDIR}/
			;;
		move+symlink)
			# Move the file out of DL_DIR and leave a symlink behind.
			mv $s ${SRC_DISTRIBUTEDIR}/
			ln -sf ${SRC_DISTRIBUTEDIR}/`basename $s` $s
			;;
	esac
}

View File

@ -0,0 +1,26 @@
#
# Creates .srec files from images.
#
# Useful for loading with Yamon.
# Define SREC_VMAADDR in your machine.conf.
SREC_CMD = "${TARGET_PREFIX}objcopy -O srec -I binary --adjust-vma ${SREC_VMAADDR} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.${type}.srec"
# Do not build srec files for these types of images:
SREC_SKIP = "tar"
do_srec () {
	# Generate a Motorola S-record (.srec) variant of each rootfs image
	# (for bootloaders such as YAMON) via SREC_CMD. Image types listed in
	# SREC_SKIP are left alone. SREC_VMAADDR must come from machine.conf.
	#
	# Fix: quote both sides of the test. With the original unquoted form
	# an empty/unset SREC_VMAADDR expanded to `[ = "" ]`, a shell syntax
	# error, instead of reaching the intended oefatal diagnostic.
	if [ "x${SREC_VMAADDR}" = "x" ] ; then
		oefatal Cannot do_srec without SREC_VMAADDR defined.
	fi
	for type in ${IMAGE_FSTYPES}; do
		for skiptype in ${SREC_SKIP}; do
			# 'continue 2' skips this image type entirely.
			if [ $type = $skiptype ] ; then continue 2 ; fi
		done
		${SREC_CMD}
	done
	return 0
}

addtask srec after do_rootfs before do_build

View File

@ -0,0 +1,158 @@
def tinder_tz_offset(off):
    # Interpret 'off' as a timezone offset in minutes.
    # Accepts a plain number such as "+200" or "-300" (or an int).
    # Special case: the zone name "Europe/Berlin" maps to +120 minutes;
    # any other unparseable value falls back to 0 (UTC).
    try:
        return int(off)
    except ValueError:
        pass
    if off == "Europe/Berlin":
        return 120
    return 0
def tinder_tinder_time(offset):
    # Current UTC time shifted by the tinderbox timezone offset,
    # formatted as MM/DD/YYYY HH:MM:SS.
    import datetime
    shifted = datetime.datetime.utcnow() + datetime.timedelta(minutes=tinder_tz_offset(offset))
    return shifted.strftime('%m/%d/%Y %H:%M:%S')
def tinder_tinder_start(date, offset):
    """Re-format the build start *date* ("%m/%d/%Y %H:%M:%S"), shifted by
    the tinderbox timezone *offset* (minutes or zone name).

    Fixes: the original passed ti[0:7] to datetime.datetime(), feeding
    tm_wday (day of week) in as the microsecond argument -- only the first
    six struct_time fields are calendar components. It also rebound the
    local name 'time', shadowing the time module just imported.
    """
    import datetime, time
    delta = datetime.timedelta(minutes=tinder_tz_offset(offset))
    ti = time.strptime(date, "%m/%d/%Y %H:%M:%S")
    start = datetime.datetime(*ti[0:6]) + delta
    return start.strftime('%m/%d/%Y %H:%M:%S')
def tinder_send_email(da, header, log):
    # Mail the tinderbox report (header + log as one plain-text body) to
    # TINDER_MAILTO from TINDER_FROM, subject TINDER_SUBJECT (defaulted).
    # Connects to the local SMTP server (smtplib's default host/port).
    # NOTE(review): Python 2 era code -- email.MIMEText / default SMTP()
    # connect only work on a py2 interpreter with a local MTA.
    import smtplib
    from bb import data
    from email.MIMEText import MIMEText
    msg = MIMEText(header +'\n' + log)
    msg['Subject'] = data.getVar('TINDER_SUBJECT',da, True) or "Tinder-Client build log"
    msg['To'] = data.getVar('TINDER_MAILTO' ,da, True)
    msg['From'] = data.getVar('TINDER_FROM', da, True)
    s = smtplib.SMTP()
    s.connect()
    s.sendmail(data.getVar('TINDER_FROM', da, True), [data.getVar('TINDER_MAILTO', da, True)], msg.as_string())
    s.close()
def tinder_send_http(da, header, log):
    # POST the report (header + log concatenated) to the tinderbox server
    # at TINDER_HOST/TINDER_URL as a single form-data body.
    from bb import data
    import httplib, urllib
    cont = "\n%s\n%s" % ( header, log)
    headers = {"Content-type": "multipart/form-data" }
    conn = httplib.HTTPConnection(data.getVar('TINDER_HOST',da, True))
    conn.request("POST", data.getVar('TINDER_URL',da,True), cont, headers)
    # NOTE(review): the response is never read (no conn.getresponse());
    # confirm the server processes the request before the close.
    conn.close()
# Prepare tinderbox mail header
def tinder_prepare_mail_header(da, status):
    """Build the 'tinderbox:' pseudo-header block for a report.

    *da* is the bitbake datastore; *status* is the build state string
    (e.g. 'building', 'build_failed', 'success').

    Fix: the accumulator was named 'str', shadowing the builtin; renamed
    to 'hdr'. Output is byte-identical to the original.
    """
    from bb import data
    hdr = "tinderbox: administrator: %s\n" % data.getVar('TINDER_ADMIN', da, True)
    hdr += "tinderbox: starttime: %s\n" % tinder_tinder_start(data.getVar('TINDER_START', da, True) or data.getVar('BUILDSTART', da, True), data.getVar('TINDER_TZ', da, True))
    hdr += "tinderbox: buildname: %s\n" % data.getVar('TINDER_BUILD', da, True)
    hdr += "tinderbox: errorparser: %s\n" % data.getVar('TINDER_ERROR', da, True)
    hdr += "tinderbox: status: %s\n" % status
    hdr += "tinderbox: timenow: %s\n" % tinder_tinder_time(data.getVar('TINDER_TZ', da, True))
    hdr += "tinderbox: tree: %s\n" % data.getVar('TINDER_TREE', da, True)
    hdr += "tinderbox: buildfamily: %s\n" % "unix"
    hdr += "tinderbox: END"
    return hdr
def tinder_do_tinder_report(event):
    """
    Report to the tinderbox. Either we will report every step
    (depending on TINDER_VERBOSE_REPORT) at the end we will send the
    tinderclient.log
    """
    from bb.event import getName
    from bb import data, mkdirhier
    import os, glob

    # variables
    name = getName(event)
    log = ""
    header = ""
    verbose = data.getVar('TINDER_VERBOSE_REPORT', event.data, True) == "1"

    # Check what we need to do Build* shows we start or are done
    if name == "BuildStarted":
        header = tinder_prepare_mail_header(event.data, 'building')
        # generate: snapshot the build environment into the log
        for var in os.environ:
            log += "%s=%s\n" % (var, os.environ[var])

        mkdirhier(data.getVar('TMPDIR', event.data, True))
        # NOTE(review): 'file' shadows the builtin and these handles are
        # never explicitly closed -- relies on refcounting to flush.
        file = open(data.getVar('TINDER_LOG', event.data, True), 'w')
        file.write(log)

        # In non-verbose mode the environment dump is written to disk but
        # not mailed now (header cleared suppresses the send below).
        if not verbose:
            header = ""

    if name == "PkgFailed" or name == "BuildCompleted":
        status = 'build_failed'
        if name == "BuildCompleted":
            status = "success"
        header = tinder_prepare_mail_header(event.data, status)
        # append the log accumulated so far on disk
        log_file = data.getVar('TINDER_LOG', event.data, True)
        file = open(log_file, 'r')
        for line in file.readlines():
            log += line

    if verbose and name == "TaskStarted":
        header = tinder_prepare_mail_header(event.data, 'building')
        log = "Task %s started" % event.task

    if verbose and name == "PkgStarted":
        header = tinder_prepare_mail_header(event.data, 'building')
        log = "Package %s started" % data.getVar('P', event.data, True)

    if verbose and name == "PkgSucceeded":
        header = tinder_prepare_mail_header(event.data, 'building')
        log = "Package %s done" % data.getVar('P', event.data, True)

    # Append the Task Log (bitbake's per-task log file) to the client log
    if name == "TaskSucceeded" or name == "TaskFailed":
        log_file = glob.glob("%s/log.%s.*" % (data.getVar('T', event.data, True), event.task))

        if len(log_file) != 0:
            to_file = data.getVar('TINDER_LOG', event.data, True)
            log_txt = open(log_file[0], 'r').readlines()
            to_file = open(to_file, 'a')

            to_file.writelines(log_txt)

            # append to the log
            if verbose:
                header = tinder_prepare_mail_header(event.data, 'building')
                for line in log_txt:
                    log += line

    # now mail the log -- nothing to do unless both header and body exist
    if len(log) == 0 or len(header) == 0:
        return

    # choose transport: SMTP by default, HTTP POST when TINDER_SENDLOG=http
    log_post_method = tinder_send_email
    if data.getVar('TINDER_SENDLOG', event.data, True) == "http":
        log_post_method = tinder_send_http

    log_post_method(event.data, header, log)
addhandler tinderclient_eventhandler
python tinderclient_eventhandler() {
    # BitBake event handler: forward every event to the tinderbox reporter
    # when TINDER_REPORT == "1". 'e' is the event object injected by
    # bitbake into the handler's namespace.
    from bb import note, error, data
    from bb.event import NotHandled
    do_tinder_report = data.getVar('TINDER_REPORT', e.data, True)
    if do_tinder_report and do_tinder_report == "1":
        tinder_do_tinder_report(e)

    # Always let other handlers see the event too.
    return NotHandled
}

View File

@ -0,0 +1,77 @@
DEPENDS_prepend="tmake "
python tmake_do_createpro() {
    # Generate a ${PN}.pro project file from TMAKE_* metadata variables when
    # the source tree does not already provide usable .pro files.
    # NOTE(review): 'data', 'os' and 'FuncFailed' are not imported here --
    # presumably injected by bitbake's python-function namespace; verify.
    import glob, sys
    from bb import note
    # Metadata variable -> tmake .pro key translation table.
    out_vartranslate = {
        "TMAKE_HEADERS": "HEADERS",
        "TMAKE_INTERFACES": "INTERFACES",
        "TMAKE_TEMPLATE": "TEMPLATE",
        "TMAKE_CONFIG": "CONFIG",
        "TMAKE_DESTDIR": "DESTDIR",
        "TMAKE_SOURCES": "SOURCES",
        "TMAKE_DEPENDPATH": "DEPENDPATH",
        "TMAKE_INCLUDEPATH": "INCLUDEPATH",
        "TMAKE_TARGET": "TARGET",
        "TMAKE_LIBS": "LIBS",
    }

    s = data.getVar('S', d, 1) or ""
    os.chdir(s)
    # Resolve TMAKE_PROFILES (or any *.pro in ${S}) to concrete files.
    profiles = (data.getVar('TMAKE_PROFILES', d, 1) or "").split()
    if not profiles:
        profiles = ["*.pro"]
    # NOTE(review): this loop mutates 'profiles' while iterating it, and
    # ppro[0].find('*') is truthy for -1 (i.e. when NO wildcard is found),
    # which looks inverted -- confirm intended semantics before relying on
    # this resolution path.
    for pro in profiles:
        ppro = glob.glob(pro)
        if ppro:
            if ppro != [pro]:
                del profiles[profiles.index(pro)]
                profiles += ppro
                continue
            if ppro[0].find('*'):
                del profiles[profiles.index(pro)]
                continue
        else:
            del profiles[profiles.index(pro)]
    # Real .pro files were found: nothing to generate.
    if len(profiles) != 0:
        return

    # output .pro using this metadata store
    try:
        from __builtin__ import file
        profile = file(data.expand('${PN}.pro', d), 'w')
    except OSError:
        raise FuncFailed("unable to open pro file for writing.")

    # fd = sys.__stdout__
    fd = profile
    for var in out_vartranslate.keys():
        val = data.getVar(var, d, 1)
        if val:
            fd.write("%s\t: %s\n" % (out_vartranslate[var], val))
    # if fd is not sys.__stdout__:
    fd.close()
}
tmake_do_configure() {
	# Select a TMAKEPATH spec directory matching the target, preferring
	# the staged qws/<os>-<arch>-g++ spec; i?86 targets map to x86.
	paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-${TARGET_ARCH}-g++ $STAGING_DIR/share/tmake/$OS-g++"
	if (echo "${TARGET_ARCH}"|grep -q 'i.86'); then
		paths="${STAGING_DATADIR}/tmake/qws/${TARGET_OS}-x86-g++ $paths"
	fi
	# First existing candidate wins.
	for i in $paths; do
		if test -e $i; then
			export TMAKEPATH=$i
			break
		fi
	done

	# Default to every .pro in the source dir, then generate the Makefile.
	if [ -z "${TMAKE_PROFILES}" ]; then
		TMAKE_PROFILES="`ls *.pro`"
	fi
	tmake -o Makefile $TMAKE_PROFILES || die "Error calling tmake on ${TMAKE_PROFILES}"
}
EXPORT_FUNCTIONS do_configure do_createpro
addtask configure after do_unpack do_patch before do_compile
addtask createpro before do_configure after do_unpack do_patch

View File

@ -0,0 +1,33 @@
# defaults
ALTERNATIVE_PRIORITY = "10"
ALTERNATIVE_LINK = "${bindir}/${ALTERNATIVE_NAME}"
# Package scriptlets: register/unregister the alternative with
# update-alternatives at install/remove time.
update_alternatives_postinst() {
	update-alternatives --install ${ALTERNATIVE_LINK} ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH} ${ALTERNATIVE_PRIORITY}
}

update_alternatives_postrm() {
	update-alternatives --remove ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH}
}
python __anonymous() {
    # Parse-time sanity check: recipes inheriting this class must define
    # ALTERNATIVE_NAME and ALTERNATIVE_PATH. (Python 2 raise syntax, as
    # used throughout this codebase.)
    if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
        raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
    if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
        raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_PATH" % bb.data.getVar('FILE', d)
}
python populate_packages_prepend () {
    # Append the update-alternatives scriptlets to the main package's
    # postinst/postrm, preserving any per-package or generic scriptlet
    # the recipe already set.
    pkg = bb.data.getVar('PN', d, 1)
    bb.note('adding update-alternatives calls to postinst/postrm for %s' % pkg)
    postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
    if not postinst:
        postinst = '#!/bin/sh\n'
    postinst += bb.data.getVar('update_alternatives_postinst', d, 1)
    bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)
    postrm = bb.data.getVar('pkg_postrm_%s' % pkg, d, 1) or bb.data.getVar('pkg_postrm', d, 1)
    if not postrm:
        postrm = '#!/bin/sh\n'
    postrm += bb.data.getVar('update_alternatives_postrm', d, 1)
    bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)
}

View File

@ -0,0 +1,69 @@
DEPENDS_append = " update-rc.d"
RDEPENDS_append = " update-rc.d"
INITSCRIPT_PARAMS ?= "defaults"
INIT_D_DIR = "${sysconfdir}/init.d"
updatercd_postinst() {
	# Register (and on-target, start) the init script.
	# $D set (offline/root-prefixed install): pass -r $D to update-rc.d.
	# $D empty (on target): -s also starts the service now.
	if test "x$D" != "x"; then
		D="-r $D"
	else
		D="-s"
	fi
	update-rc.d $D ${INITSCRIPT_NAME} ${INITSCRIPT_PARAMS}
}
updatercd_prerm() {
	# Stop the service before removal when running on the target.
	# NOTE(review): the offline branch only rewrites D and then does
	# nothing with it -- offline prerm is effectively a no-op; confirm
	# whether an update-rc.d invocation is missing here.
	if test "x$D" != "x"; then
		D="-r $D"
	else
		${INIT_D_DIR}/${INITSCRIPT_NAME} stop
	fi
}
updatercd_postrm() {
	# Remove the rc.d symlinks for the init script.
	# NOTE(review): $D is not set in this scriptlet; it expands empty on
	# target and relies on the caller's environment offline -- confirm.
	update-rc.d $D ${INITSCRIPT_NAME} remove
}
python __anonymous() {
    # Parse-time check: unless the recipe names its init-script packages
    # explicitly via INITSCRIPT_PACKAGES, it must set INITSCRIPT_NAME and
    # INITSCRIPT_PARAMS. (Python 2 raise syntax, as used in this codebase.)
    if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None:
        if bb.data.getVar('INITSCRIPT_NAME', d) == None:
            raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d)
        if bb.data.getVar('INITSCRIPT_PARAMS', d) == None:
            raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_PARAMS" % bb.data.getVar('FILE', d)
}
python populate_packages_prepend () {
    # For each package in INITSCRIPT_PACKAGES (default: PN, falling back to
    # the first PACKAGES entry when PN is not a real package), append the
    # updatercd_* scriptlets to its postinst/prerm/postrm.
    def update_rcd_package(pkg):
        # Expand the scriptlets under pkg-specific OVERRIDES so that
        # per-package INITSCRIPT_* values take effect.
        bb.debug(1, 'adding update-rc.d calls to postinst/postrm for %s' % pkg)
        localdata = bb.data.createCopy(d)
        overrides = bb.data.getVar("OVERRIDES", localdata, 1)
        bb.data.setVar("OVERRIDES", "%s:%s" % (pkg, overrides), localdata)
        bb.data.update_data(localdata)

        postinst = bb.data.getVar('pkg_postinst', localdata, 1)
        if not postinst:
            postinst = '#!/bin/sh\n'
        postinst += bb.data.getVar('updatercd_postinst', localdata, 1)
        bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

        prerm = bb.data.getVar('pkg_prerm', localdata, 1)
        if not prerm:
            prerm = '#!/bin/sh\n'
        prerm += bb.data.getVar('updatercd_prerm', localdata, 1)
        bb.data.setVar('pkg_prerm_%s' % pkg, prerm, d)

        postrm = bb.data.getVar('pkg_postrm', localdata, 1)
        if not postrm:
            postrm = '#!/bin/sh\n'
        postrm += bb.data.getVar('updatercd_postrm', localdata, 1)
        bb.data.setVar('pkg_postrm_%s' % pkg, postrm, d)

    pkgs = bb.data.getVar('INITSCRIPT_PACKAGES', d, 1)
    if pkgs == None:
        pkgs = bb.data.getVar('PN', d, 1)
        packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
        if not pkgs in packages and packages != []:
            pkgs = packages[0]
    for pkg in pkgs.split():
        update_rcd_package(pkg)
}

View File

@ -0,0 +1,33 @@
# we dont need the kernel in the image
ROOTFS_POSTPROCESS_COMMAND += "rm -f ${IMAGE_ROOTFS}/boot/zImage*"
def wrt_get_kernel_version(d):
    # Read the kernel ABI version from the staged kernel tree; only
    # meaningful for image_ipk builds, otherwise a placeholder string.
    import bb
    if not bb.data.inherits_class('image_ipk', d):
        return "-no kernel version for available-"
    skd = bb.data.getVar('STAGING_KERNEL_DIR', d, 1)
    return base_read_file(skd + '/kernel-abiversion')
wrt_create_images() {
	# Wrap each generated rootfs image into WRT54-router flashable
	# formats: a generic .trx, WRT54GS/WRT54G variants, and Motorola .bin.
	I=${DEPLOY_DIR}/images
	KERNEL_VERSION="${@wrt_get_kernel_version(d)}"

	for type in ${IMAGE_FSTYPES}; do
		# generic: loader + lzma kernel + rootfs at offset 0x10000
		trx -o ${I}/wrt-generic-${type}.trx ${I}/loader.gz \
			${I}/wrt-kernel-${KERNEL_VERSION}.lzma -a 0x10000 ${I}/${IMAGE_NAME}.rootfs.${type}

		# WRT54GS
		addpattern -2 -i ${I}/wrt-generic-${type}.trx -o ${I}/wrt54gs-${type}.trx -g

		# WRT54G: same image with the header magic patched
		sed "1s,^W54S,W54G," ${I}/wrt54gs-${type}.trx > ${I}/wrt54g-${type}.trx

		# motorola
		motorola-bin ${I}/wrt-generic-${type}.trx ${I}/motorola-${type}.bin
	done;
}
IMAGE_POSTPROCESS_COMMAND += "wrt_create_images;"
DEPENDS_prepend = "${@["wrt-imagetools-native ", ""][(bb.data.getVar('PACKAGES', d, 1) == '')]}"

View File

@ -0,0 +1,19 @@
# xfce.oeclass
# Copyright (C) 2004, Advanced Micro Devices, Inc. All Rights Reserved
# Released under the MIT license (see packages/COPYING)
# Global class to make it easier to maintain XFCE packages
HOMEPAGE = "http://www.xfce.org"
LICENSE = "LGPL-2"
SRC_URI = "http://www.us.xfce.org/archive/xfce-${PV}/src/${PN}-${PV}.tar.gz"
inherit autotools
EXTRA_OECONF += "--with-pluginsdir=${libdir}/xfce4/panel-plugins/"
# FIXME: Put icons in their own package too?
FILES_${PN} += "${datadir}/icons/* ${datadir}/applications/* ${libdir}/xfce4/modules/*.so*"
FILES_${PN}-doc += "${datadir}/xfce4/doc"

View File

@ -0,0 +1,15 @@
LICENSE= "BSD-X"
SECTION = "x11/libs"
XLIBS_CVS = "cvs://anoncvs:anoncvs@pdx.freedesktop.org/cvs/xlibs"
inherit autotools pkgconfig
do_stage() {
	# Install directly into the staging area (make install with staging
	# paths substituted) so dependent recipes can build against this lib.
	oe_runmake install prefix=${STAGING_DIR} \
		bindir=${STAGING_BINDIR} \
		includedir=${STAGING_INCDIR} \
		libdir=${STAGING_LIBDIR} \
		datadir=${STAGING_DATADIR} \
		mandir=${STAGING_DATADIR}/man
}

View File

@ -0,0 +1,381 @@
##################################################################
# Standard target filesystem paths.
##################################################################
# Path prefixes
export base_prefix = ""
export prefix = "/usr"
export exec_prefix = "${prefix}"
# Base paths
export base_bindir = "${base_prefix}/bin"
export base_sbindir = "${base_prefix}/sbin"
export base_libdir = "${base_prefix}/lib"
# Architecture independent paths
export datadir = "${prefix}/share"
export sysconfdir = "/etc"
export sharedstatedir = "${prefix}/com"
export localstatedir = "/var"
export infodir = "${datadir}/info"
export mandir = "${datadir}/man"
export docdir = "${datadir}/doc"
export servicedir = "/srv"
# Architecture dependent paths
export bindir = "${exec_prefix}/bin"
export sbindir = "${exec_prefix}/sbin"
export libexecdir = "${exec_prefix}/libexec"
export libdir = "${exec_prefix}/lib"
export includedir = "${exec_prefix}/include"
export oldincludedir = "${exec_prefix}/include"
##################################################################
# Architecture-dependent build variables.
##################################################################
BUILD_ARCH = "${@os.uname()[4]}"
BUILD_OS = "${@os.uname()[0].lower()}"
BUILD_VENDOR = ""
BUILD_SYS = "${BUILD_ARCH}${BUILD_VENDOR}-${BUILD_OS}"
BUILD_PREFIX = ""
BUILD_CC_ARCH = ""
HOST_ARCH = "${TARGET_ARCH}"
HOST_OS = "${TARGET_OS}"
HOST_VENDOR = "${TARGET_VENDOR}"
HOST_SYS = "${HOST_ARCH}${HOST_VENDOR}-${HOST_OS}"
HOST_PREFIX = "${TARGET_PREFIX}"
HOST_CC_ARCH = "${TARGET_CC_ARCH}"
TARGET_ARCH = "INVALID"
TARGET_OS = "INVALID"
TARGET_VENDOR = "${BUILD_VENDOR}"
TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}"
TARGET_PREFIX = "${TARGET_SYS}-"
TARGET_CC_ARCH = ""
PACKAGE_ARCH = "${HOST_ARCH}"
MACHINE_ARCH = "${@[bb.data.getVar('HOST_ARCH', d, 1), bb.data.getVar('MACHINE', d, 1)][bool(bb.data.getVar('MACHINE', d, 1))]}"
##################################################################
# Date/time variables.
##################################################################
DATE := "${@time.strftime('%Y%m%d',time.gmtime())}"
TIME := "${@time.strftime('%H%M%S',time.gmtime())}"
DATETIME = "${DATE}${TIME}"
##################################################################
# Package default variables.
##################################################################
PN = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[1] or '1.0'}"
PR = "${@bb.parse.BBHandler.vars_from_file(bb.data.getVar('FILE',d),d)[2] or 'r0'}"
PF = "${PN}-${PV}-${PR}"
P = "${PN}-${PV}"
# Package info.
SECTION = "base"
PRIORITY = "optional"
DESCRIPTION = "Version ${PV}-${PR} of package ${PN}"
LICENSE = "unknown"
MAINTAINER = "OpenEmbedded Team <oe@handhelds.org>"
HOMEPAGE = "unknown"
# Package dependencies and provides.
DEPENDS = ""
RDEPENDS = ""
PROVIDES = ""
PROVIDES_prepend = "${P} ${PF} ${PN} "
RPROVIDES = ""
PACKAGES = "${PN} ${PN}-doc ${PN}-dev ${PN}-locale"
FILES = ""
FILES_${PN} = "${bindir} ${sbindir} ${libexecdir} ${libdir}/lib*.so.* \
${sysconfdir} ${sharedstatedir} ${localstatedir} \
/bin /sbin /lib/*.so* ${datadir}/${PN} ${libdir}/${PN} \
${datadir}/pixmaps ${datadir}/applications \
${datadir}/idl ${datadir}/omf ${datadir}/sounds \
${libdir}/bonobo/servers"
SECTION_${PN}-doc = "doc"
FILES_${PN}-doc = "${docdir} ${mandir} ${infodir} ${datadir}/gtk-doc \
${datadir}/gnome/help"
SECTION_${PN}-dev = "devel"
FILES_${PN}-dev = "${includedir} ${libdir}/lib*.so ${libdir}/*.la \
${libdir}/*.a ${libdir}/*.o ${libdir}/pkgconfig \
/lib/*.a /lib/*.o ${datadir}/aclocal"
FILES_${PN}-locale = "${datadir}/locale"
# File manifest
export MANIFEST = "${FILESDIR}/manifest"
FILE_DIRNAME = "${@os.path.dirname(bb.data.getVar('FILE', d))}"
FILESPATH = "${FILE_DIRNAME}/${PF}:${FILE_DIRNAME}/${P}:${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/files:${FILE_DIRNAME}"
FILESDIR = "${@bb.which(bb.data.getVar('FILESPATH', d, 1), '.')}"
##################################################################
# General work and output directories for the build system.
##################################################################
TMPDIR = "${TOPDIR}/tmp"
CACHE = "${TMPDIR}/cache"
DL_DIR = "${TMPDIR}/downloads"
CVSDIR = "${DL_DIR}/cvs"
STAMP = "${TMPDIR}/stamps/${PF}"
WORKDIR = "${TMPDIR}/work/${PF}"
T = "${WORKDIR}/temp"
D = "${WORKDIR}/image"
S = "${WORKDIR}/${P}"
B = "${S}"
STAGING_DIR = "${TMPDIR}/staging"
STAGING_BINDIR = "${STAGING_DIR}/${BUILD_SYS}/bin"
STAGING_LIBDIR = "${STAGING_DIR}/${HOST_SYS}/lib"
STAGING_INCDIR = "${STAGING_DIR}/${HOST_SYS}/include"
STAGING_DATADIR = "${STAGING_DIR}/${HOST_SYS}/share"
STAGING_LOADER_DIR = "${STAGING_DIR}/${HOST_SYS}/loader"
DEPLOY_DIR = "${TMPDIR}/deploy"
DEPLOY_DIR_TAR = "${DEPLOY_DIR}/tar"
DEPLOY_DIR_IPK = "${DEPLOY_DIR}/ipk"
DEPLOY_DIR_RPM = "${DEPLOY_DIR}/rpm"
##################################################################
# Kernel info.
##################################################################
OLDEST_KERNEL = "2.4.0"
STAGING_KERNEL_DIR = "${STAGING_DIR}/${HOST_SYS}/kernel"
##################################################################
# Specific image creation and rootfs population info.
##################################################################
DEPLOY_DIR_IMAGE = "${DEPLOY_DIR}/images"
IMAGE_ROOTFS = "${TMPDIR}/rootfs"
IMAGE_BASENAME = "rootfs"
IMAGE_NAME = "${IMAGE_BASENAME}-${MACHINE}-${DATETIME}"
IMAGE_CMD = ""
IMAGE_CMD_jffs2 = "mkfs.jffs2 --root=${IMAGE_ROOTFS} --faketime \
--output=${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.jffs2 \
${EXTRA_IMAGECMD}"
IMAGE_CMD_cramfs = "mkcramfs ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.cramfs ${EXTRA_IMAGECMD}"
IMAGE_CMD_ext2 = "genext2fs -b ${IMAGE_ROOTFS_SIZE} -d ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ext2 ${EXTRA_IMAGECMD}"
IMAGE_CMD_ext2.gz = "mkdir ${DEPLOY_DIR_IMAGE}/tmp.gz; genext2fs -b ${IMAGE_ROOTFS_SIZE} -d ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/tmp.gz/${IMAGE_NAME}.rootfs.ext2 ${EXTRA_IMAGECMD}; gzip -f -9 ${DEPLOY_DIR_IMAGE}/tmp.gz/${IMAGE_NAME}.rootfs.ext2; mv ${DEPLOY_DIR_IMAGE}/tmp.gz/${IMAGE_NAME}.rootfs.ext2.gz ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ext2.gz; rmdir ${DEPLOY_DIR_IMAGE}/tmp.gz"
IMAGE_CMD_squashfs = "mksquashfs ${IMAGE_ROOTFS} ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.squashfs ${EXTRA_IMAGECMD} -noappend"
IMAGE_CMD_tar = "cd ${IMAGE_ROOTFS} && tar -jcvf ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.tar.bz2 ."
EXTRA_IMAGECMD = ""
EXTRA_IMAGECMD_jffs2 = "--pad --little-endian --eraseblock=0x40000"
EXTRA_IMAGECMD_squashfs = "-le -b 16384"
IMAGE_FSTYPE = "jffs2"
IMAGE_FSTYPES = "${IMAGE_FSTYPE}"
IMAGE_ROOTFS_SIZE_ext2 = "65536"
IMAGE_ROOTFS_SIZE_ext2.gz = "65536"
IMAGE_DEPENDS = ""
IMAGE_DEPENDS_jffs2 = "mtd-utils-native"
IMAGE_DEPENDS_cramfs = "cramfs-native"
IMAGE_DEPENDS_ext2 = "genext2fs-native"
IMAGE_DEPENDS_ext2.gz = "genext2fs-native"
IMAGE_DEPENDS_squashfs = "squashfs-tools-native"
EXTRA_IMAGEDEPENDS = ""
##################################################################
# Toolchain info.
##################################################################
CROSS_DIR = "${TMPDIR}/cross"
CROSS_DATADIR = "${CROSS_DIR}/share"
export PATH_prepend = "${STAGING_BINDIR}/${HOST_SYS}:${STAGING_BINDIR}:${CROSS_DIR}/bin:"
##################################################################
# Build utility info.
##################################################################
CCACHE = "${@bb.which(bb.data.getVar('PATH', d), 'ccache') and 'ccache '}"
export CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_ARCH}"
export CXX = "${CCACHE}${HOST_PREFIX}g++ ${HOST_CC_ARCH}"
export F77 = "${CCACHE}${HOST_PREFIX}g77 ${HOST_CC_ARCH}"
export CPP = "${HOST_PREFIX}gcc -E"
export LD = "${HOST_PREFIX}ld"
export CCLD = "${CC}"
export AR = "${HOST_PREFIX}ar"
export AS = "${HOST_PREFIX}as"
export RANLIB = "${HOST_PREFIX}ranlib"
export STRIP = "${HOST_PREFIX}strip"
export BUILD_CC = "${CCACHE}${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
export BUILD_CXX = "${CCACHE}${BUILD_PREFIX}g++ ${BUILD_CC_ARCH}"
export BUILD_F77 = "${CCACHE}${BUILD_PREFIX}g77 ${BUILD_CC_ARCH}"
export BUILD_CPP = "${BUILD_PREFIX}cpp"
export BUILD_LD = "${BUILD_PREFIX}ld"
export BUILD_CCLD = "${BUILD_PREFIX}gcc"
export BUILD_AR = "${BUILD_PREFIX}ar"
export BUILD_RANLIB = "${BUILD_PREFIX}ranlib"
export BUILD_STRIP = "${BUILD_PREFIX}strip"
export MAKE = "make"
EXTRA_OEMAKE = "-e MAKEFLAGS="
##################################################################
# Build flags and options.
##################################################################
export BUILD_CPPFLAGS = "-I${STAGING_DIR}/${BUILD_SYS}/include"
export CPPFLAGS = "${TARGET_CPPFLAGS}"
export TARGET_CPPFLAGS = "-I${STAGING_DIR}/${TARGET_SYS}/include"
export BUILD_CFLAGS = "${BUILD_CPPFLAGS} ${BUILD_OPTIMIZATION}"
export CFLAGS = "${TARGET_CFLAGS}"
export TARGET_CFLAGS = "${TARGET_CPPFLAGS} ${SELECTED_OPTIMIZATION}"
export BUILD_CXXFLAGS = "${BUILD_CFLAGS} -fpermissive"
export CXXFLAGS = "${TARGET_CXXFLAGS}"
export TARGET_CXXFLAGS = "${TARGET_CFLAGS} -fpermissive"
export BUILD_LDFLAGS = "-L${STAGING_DIR}/${BUILD_SYS}/lib \
-Wl,-rpath-link,${STAGING_DIR}/${BUILD_SYS}/lib \
-Wl,-rpath,${STAGING_DIR}/${BUILD_SYS}/lib -Wl,-O1"
export LDFLAGS = "${TARGET_LDFLAGS}"
export TARGET_LDFLAGS = "-L${STAGING_DIR}/${TARGET_SYS}/lib \
-Wl,-rpath-link,${STAGING_DIR}/${TARGET_SYS}/lib \
-Wl,-O1"
# Which flags to leave by strip-flags() in bin/build/oebuild.sh ?
ALLOWED_FLAGS = "-O -mcpu -march -pipe"
# Pass parallel make options to the compile task only
EXTRA_OEMAKE_prepend_task_do_compile = "${PARALLEL_MAKE} "
##################################################################
# Optimization flags.
##################################################################
FULL_OPTIMIZATION = "-fexpensive-optimizations -fomit-frame-pointer -frename-registers -O2"
DEBUG_OPTIMIZATION = "-O -g"
SELECTED_OPTIMIZATION = "${@bb.data.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][bb.data.getVar('DEBUG_BUILD', d, 1) == '1'], d, 1)}"
BUILD_OPTIMIZATION = "-O2"
##################################################################
# Bootstrap stuff.
##################################################################
BOOTSTRAP_EXTRA_DEPENDS = ""
BOOTSTRAP_EXTRA_RDEPENDS = ""
BOOTSTRAP_EXTRA_RRECOMMENDS = ""
##################################################################
# Palmtop stuff.
##################################################################
export QTDIR = "${STAGING_DIR}/${HOST_SYS}/qt2"
export QPEDIR = "${STAGING_DIR}/${HOST_SYS}"
export OPIEDIR = "${STAGING_DIR}/${HOST_SYS}"
export palmtopdir = "/opt/QtPalmtop"
export palmqtdir = "/opt/QtPalmtop"
##################################################################
# Download locations and utilities.
##################################################################
GNU_MIRROR = "ftp://ftp.gnu.org/gnu"
DEBIAN_MIRROR = "ftp://ftp.debian.org/debian/pool"
SOURCEFORGE_MIRROR = "http://heanet.dl.sourceforge.net/sourceforge"
GPE_MIRROR = "http://ftp.handhelds.org/pub/projects/gpe/source"
XLIBS_MIRROR = "http://xlibs.freedesktop.org/release"
GNOME_MIRROR = "http://ftp.gnome.org/pub/GNOME/sources"
HANDHELDS_CVS = "cvs://anoncvs:anoncvs@cvs.handhelds.org/cvs"
# You can use the mirror of your country to get faster downloads by putting
# export DEBIAN_MIRROR = "ftp://ftp.de.debian.org/debian/pool"
# export SOURCEFORGE_MIRROR = "http://belnet.dl.sourceforge.net/sourceforge"
# into your local.conf
FETCHCOMMAND = ""
FETCHCOMMAND_wget = "/usr/bin/env wget -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
FETCHCOMMAND_cvs = "/usr/bin/env cvs -d${CVSROOT} co ${CVSCOOPTS} ${CVSMODULE}"
RESUMECOMMAND = ""
RESUMECOMMAND_wget = "/usr/bin/env wget -c -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
UPDATECOMMAND = ""
UPDATECOMMAND_cvs = "/usr/bin/env cvs update -d -P ${CVSCOOPTS}"
CVSDATE = "${DATE}"
SRC_URI = "file://${FILE}"
##################################################################
# Miscellaneous utilities.
##################################################################
MKTEMPDIRCMD = "mktemp -d -q ${TMPBASE}"
MKTEMPCMD = "mktemp -q ${TMPBASE}"
# Program to be used to patch sources, use 'inherit patcher' to overwrite this:
PATCHCLEANCMD = 'if [ -n "`quilt applied`" ]; then quilt pop -a -R -f || exit 1; fi'
PATCHCMD = "pnum='%s'; name='%s'; patch='%s'; mkdir -p patches ; quilt upgrade >/dev/null 2>&1; quilt import -f -p $pnum -n $name $patch; chmod u+w patches/$name; quilt push"
PATCH_DEPENDS = "quilt-native"
# GNU patch tries to be intelligent about checking out read-only files from
# a RCS, which freaks out those special folks with active Perforce clients
# the following makes patch ignore RCS:
export PATCH_GET=0
# Program to be used to build ipkg packages
IPKGBUILDCMD = "ipkg-build -o 0 -g 0"
##################################################################
# Not sure about the rest of this yet.
##################################################################
# slot - currently unused by OE. portage remnants
SLOT = "0"
# Other
export PKG_CONFIG_PATH = "${STAGING_DATADIR}/pkgconfig"
export PKG_CONFIG_DISABLE_UNINSTALLED = "yes"
export QMAKE_MKSPEC_PATH = "${STAGING_DIR}/${BUILD_SYS}/share/qmake"
export STAGING_SIPDIR = "${STAGING_DIR}/${BUILD_SYS}/share/sip"
export STAGING_IDLDIR = "${STAGING_DATADIR}/idl"
# default test results for autoconf
# possible candidate for moving into autotools.oeclass -CL
export CONFIG_SITE = "${@bb.which(bb.data.getVar('BBPATH', d, 1), 'site/%s-%s' % (bb.data.getVar('HOST_ARCH', d, 1), bb.data.getVar('HOST_OS', d, 1)))}"
# library package naming
AUTO_LIBNAME_PKGS = "${PACKAGES}"
###
### Config file processing
###
# This means that an environment variable named '<foo>_arm' overrides an
# environment variable '<foo>' (when ${TARGET_ARCH} is arm). And the same: an
# environment variable '<foo>_ramses' overrides both '<foo>' and '<foo>_arm'
# when ${MACHINE} is 'ramses'. And finally '<foo>_local' overrides anything.
#
# This works for functions as well, they are really just environment variables.
OVERRIDES = "local:${MACHINE}:${DISTRO}:${TARGET_OS}:${TARGET_ARCH}:build-${BUILD_OS}"
##################################################################
# Include the rest of the config files.
##################################################################
include conf/site.conf
include conf/auto.conf
include conf/local.conf
include conf/build/${BUILD_SYS}.conf
include conf/target/${TARGET_SYS}.conf
include conf/machine/${MACHINE}.conf
include conf/distro/${DISTRO}.conf
include conf/documentation.conf

View File

@ -0,0 +1,52 @@
include conf/distro/familiar.conf
DISTRO = "familiar"
DISTRO_NAME = "Familiar Linux"
DISTRO_VERSION = "v0.8.3-unofficial-unsupported-snapshot-${DATE}"
DISTRO_TYPE = "debug"
#DISTRO_TYPE = "release"
FEED_URIS += " \
base##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/base \
updates##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/updates"
#CVSDATE = 20050331
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc:gcc-cross"
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}g++:gcc-cross"
# The CSL compiler is unusable because
# 1) certain programs stop to compile
# 2) more programs segfault
PREFERRED_VERSION_gcc = "3.4.4"
PREFERRED_VERSION_gcc-cross = "3.4.4"
PREFERRED_VERSION_gcc-cross-initial = "3.4.4"
#
# Opie
#
PREFERRED_PROVIDERS += " virtual/libqpe:libqpe-opie"
PREFERRED_VERSION_qte = "2.3.10"
#OPIE_VERSION = "1.2.0"
include conf/distro/preferred-opie-versions.inc
#
# GPE
#
PREFERRED_PROVIDERS += "virtual/xserver:xserver-kdrive"
PREFERRED_PROVIDERS += "virtual/gconf:gconf-dbus"
#PREFERRED_PROVIDER_x11 = "diet-x11"
include conf/distro/preferred-gpe-versions-2.7.inc
#
# E
#
include conf/distro/preferred-e-versions.inc

View File

@ -0,0 +1,11 @@
#@TYPE: Distribution
#@NAME: Familiar Linux
#@DESCRIPTION: Distribution configuration for Familiar Linux (handhelds.org)
INHERIT += "package_ipk debian"
TARGET_OS = "linux"
PARALLEL_INSTALL_MODULES = "1"
UDEV_DEVFS_RULES = "1"

View File

@ -0,0 +1,40 @@
include conf/distro/familiar.conf
DISTRO = "maemo"
DISTRO_NAME = "Maemo Linux"
DISTRO_VERSION = "v1.0b-${DATE}"
DISTRO_TYPE = "debug"
#DISTRO_TYPE = "release"
FEED_URIS += " \
base##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/base \
updates##http://familiar.handhelds.org/releases/${DISTRO_VERSION}/feed/updates"
#CVSDATE = 20050331
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}gcc:gcc-cross"
PREFERRED_PROVIDERS += "virtual/${TARGET_PREFIX}g++:gcc-cross"
#PREFERRED_VERSION_binutils-cross = "2.15.91.0.2"
#PREFERRED_VERSION_gcc-cross = "3.4.4"
#PREFERRED_VERSION_gcc-cross-initial = "3.4.4"
#PREFERRED_VERSION_libtool-native = "1.5.6"
#PREFERRED_VERSION_libtool-cross= "1.5.6"
#
# GPE
#
PREFERRED_PROVIDERS += "virtual/xserver:xserver-kdrive"
PREFERRED_PROVIDERS += "virtual/gconf:gconf-dbus"
PREFERRED_PROVIDER_x11 = "diet-x11"
include conf/distro/preferred-gpe-versions.inc
#
# Maemo
#
include conf/distro/maemo-preferred.inc

View File

@ -0,0 +1,10 @@
PREFERRED_PROVIDER_gconf = "gconf-osso"
PREFERRED_PROVIDER_tslib = "tslib-maemo"
PREFERRED_VERSION_dbus = "0.23.1-osso5"
PREFERRED_VERSION_audiofile = "0.2.6-3osso4"
PREFERRED_PROVIDER_esd = "osso-esd"
PREFERRED_VERSION_gtk+ = "2.6.4-1.osso7"
PREFERRED_VERSION_glib-2.0 = "2.6.4"
PREFERRED_VERSION_pango = "1.8.1"
PREFERRED_VERSION_atk = "1.9.0"
PREFERRED_VERSION_diet-x11 ?= "6.2.1"

View File

@ -0,0 +1,58 @@
include conf/distro/openzaurus.conf
DISTRO = "openzaurus"
DISTRO_NAME = "OpenZaurus"
DISTRO_VERSION = "3.5.3-snapshot-${DATE}"
# DISTRO_VERSION = "3.5.4"
DISTRO_TYPE = "debug"
# DISTRO_TYPE = "release"
FEED_URIS += " \
upgrades##http://openzaurus.org/official/unstable/3.5.3/upgrades/ \
machine##http://openzaurus.org/official/unstable/3.5.3/feed/machine/${MACHINE} \
base##http://openzaurus.org/official/unstable/3.5.3/feed/base/ \
libs##http://openzaurus.org/official/unstable/3.5.3/feed/libs/ \
console##http://openzaurus.org/official/unstable/3.5.3/feed/console \
devel##http://openzaurus.org/official/unstable/3.5.3/feed/devel"
# CVSDATE = "20050704"
#
# Zaurus
#
ASSUME_PROVIDED += "virtual/arm-linux-gcc-2.95"
OEINCLUDELOGS = "yes"
KERNEL_CONSOLE = "ttyS0"
#DEBUG_OPTIMIZATION = "-O -g3"
#DEBUG_BUILD = "1"
#INHIBIT_PACKAGE_STRIP = "1"
#
# Base
#
PREFERRED_VERSION_binutils-cross = "2.15.94.0.1"
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc:gcc-cross"
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}g++:gcc-cross"
#
# Opie
#
PREFERRED_PROVIDERS += " virtual/libqpe:libqpe-opie"
OPIE_VERSION = "1.2.1"
include conf/distro/preferred-opie-versions.inc
#
# GPE
#
PREFERRED_PROVIDERS += "virtual/xserver:xserver-kdrive"
#PREFERRED_PROVIDER_x11 = "diet-x11"
PREFERRED_PROVIDER_x11 = "x11"
include conf/distro/preferred-gpe-versions-2.7.inc
#
# E
#
include conf/distro/preferred-e-versions.inc

View File

@ -0,0 +1,12 @@
#@TYPE: Distribution
#@NAME: OpenZaurus
#@DESCRIPTION: Distribution configuration for OpenZaurus (http://www.openzaurus.org)
INHERIT += " package_ipk debian"
# For some reason, this doesn't work
# TARGET_OS ?= "linux"
TARGET_OS = "linux"
TARGET_FPU = "soft"
PARALLEL_INSTALL_MODULES = "1"

View File

@ -0,0 +1,13 @@
#
# Specify which versions of E-related libraries and applications to build
#
PREFERRED_VERSION_ecore = "0.9.9.013"
PREFERRED_VERSION_edb = "1.0.5.004"
PREFERRED_VERSION_edje = "0.5.0.013"
PREFERRED_VERSION_eet = "0.9.10.013"
PREFERRED_VERSION_embryo = "0.9.1.013"
PREFERRED_VERSION_etox = "0.9.0.004"
PREFERRED_VERSION_evas = "0.9.9.013"
PREFERRED_VERSION_ewl = "0.0.4.004"
PREFERRED_VERSION_imlib2 = "1.2.1.004"

View File

@ -0,0 +1,86 @@
#
# Specify which versions of GPE (and related) applications to build
#
#work around some breakages
CVSDATE_xserver-kdrive=20050207
CVSDATE_minimo=20050401
CVSDATE_cairo=20050330
CVSDATE_xext=20050222
#set some preferred providers:
PREFERRED_PROVIDER_gconf=gconf-dbus
#specify versions, as the header says :)
PREFERRED_VERSION_libmatchbox ?= "1.6"
PREFERRED_VERSION_matchbox ?= "0.9.1"
PREFERRED_VERSION_matchbox-common ?= "0.9.1"
PREFERRED_VERSION_matchbox-desktop ?= "0.9.1"
PREFERRED_VERSION_matchbox-wm ?= "0.9.3"
PREFERRED_VERSION_matchbox-panel ?= "0.9.1"
PREFERRED_VERSION_matchbox-applet-inputmanager ?= "0.6"
PREFERRED_VERSION_gtk+ ?= "2.6.3"
PREFERRED_VERSION_libgpewidget ?= "0.97"
PREFERRED_VERSION_libgpepimc ?= "0.4"
PREFERRED_VERSION_libgpevtype ?= "0.11"
PREFERRED_VERSION_libschedule ?= "0.14"
PREFERRED_VERSION_gpe-icons ?= "0.24"
PREFERRED_VERSION_libgsm ?= "1.0.10"
PREFERRED_VERSION_diet-x11 ?= "6.2.1"
PREFERRED_VERSION_xproto ?= "6.6.2"
PREFERRED_VERSION_xcursor-transparent-theme ?= "0.1.1"
PREFERRED_VERSION_rxvt-unicode ?= "5.3"
PREFERRED_VERSION_gtk2-theme-angelistic ?= "0.3"
PREFERRED_VERSION_xst ?= "0.14"
#PREFERRED_VERSION_xextensions ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_xprop ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_xhost ?= "0.0cvs20040413"
PREFERRED_VERSION_xrdb ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_gpe-login ?= "0.76"
PREFERRED_VERSION_gpe-session-scripts ?= "0.62"
PREFERRED_VERSION_gpe-soundserver ?= "0.4-1"
PREFERRED_VERSION_gpe-todo ?= "0.50"
PREFERRED_VERSION_gpe-calendar ?= "0.61"
PREFERRED_VERSION_gpe-sketchbox ?= "0.2.8"
PREFERRED_VERSION_gpe-contacts ?= "0.36"
PREFERRED_VERSION_gpe-today ?= "0.08"
PREFERRED_VERSION_matchbox-panel-manager ?= "0.1"
PREFERRED_VERSION_dbus ?= "0.23"
PREFERRED_VERSION_gpe-bluetooth ?= "0.38"
PREFERRED_VERSION_gpe-su ?= "0.18"
PREFERRED_VERSION_gpe-conf ?= "0.1.23"
PREFERRED_VERSION_gpe-clock ?= "0.20"
PREFERRED_VERSION_gpe-mininet ?= "0.5"
PREFERRED_VERSION_gpe-mixer ?= "0.40"
PREFERRED_VERSION_gpe-shield ?= "0.7"
PREFERRED_VERSION_gpe-wlancfg ?= "0.2.6"
PREFERRED_VERSION_gpe-taskmanager ?= "0.17"
PREFERRED_VERSION_keylaunch ?= "2.0.7"
PREFERRED_VERSION_minilite ?= "0.47"
PREFERRED_VERSION_xmonobut ?= "0.4"
PREFERRED_VERSION_gpe-edit ?= "0.28"
PREFERRED_VERSION_gpe-gallery ?= "0.95"
PREFERRED_VERSION_gpe-calculator ?= "0.2"
PREFERRED_VERSION_gpe-package ?= "0.2"
PREFERRED_VERSION_gpe-soundbite ?= "1.0.5"
PREFERRED_VERSION_gpe-terminal ?= "1.1"
PREFERRED_VERSION_gpe-watch ?= "0.10"
PREFERRED_VERSION_gpe-what ?= "0.33"
PREFERRED_VERSION_gpe-filemanager ?= "0.20"
PREFERRED_VERSION_gpe-go ?= "0.05"
PREFERRED_VERSION_gpe-irc ?= "0.07"
PREFERRED_VERSION_gpe-lights ?= "0.13"
#PREFERRED_VERSION_gpe-nmf ?= "0.19"
PREFERRED_VERSION_gpe-othello ?= "0.2-1"
PREFERRED_VERSION_gpe-plucker ?= "0.2"
PREFERRED_VERSION_gpe-tetris ?= "0.6-2"
PREFERRED_VERSION_gsoko ?= "0.4.2-gpe6"
PREFERRED_VERSION_xdemineur ?= "2.1.1"
PREFERRED_VERSION_matchbox-panel-hacks ?= "0.3-1"
PREFERRED_VERSION_rosetta ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_dillo2 ?= "0.6.6"
PREFERRED_VERSION_minimo ?= "0.0cvs${CVSDATE_minimo}"
PREFERRED_VERSION_linphone-hh ?= "0.12.2.hh1"
PREFERRED_VERSION_linphone ?= "0.12.2"
PREFERRED_VERSION_firefox ?= "1.0"

View File

@ -0,0 +1,88 @@
#
# Specify which versions of GPE (and related) applications to build
#
#work around some breakages
#CVSDATE_minimo=20050401
#set some preferred providers:
PREFERRED_PROVIDER_gconf=gconf-dbus
#specify versions, as the header says :)
PREFERRED_VERSION_libmatchbox ?= "1.7"
PREFERRED_VERSION_matchbox ?= "0.9.1"
PREFERRED_VERSION_matchbox-common ?= "0.9.1"
PREFERRED_VERSION_matchbox-desktop ?= "0.9.1"
PREFERRED_VERSION_matchbox-wm ?= "0.9.5"
PREFERRED_VERSION_matchbox-panel ?= "0.9.2"
PREFERRED_VERSION_matchbox-applet-inputmanager ?= "0.6"
PREFERRED_VERSION_atk ?= "1.9.0"
PREFERRED_VERSION_cairo ?= "0.5.2"
PREFERRED_VERSION_glib-2.0 ?= "2.6.4"
PREFERRED_VERSION_gtk+ ?= "2.6.7"
PREFERRED_VERSION_pango ?= "1.8.1"
PREFERRED_VERSION_librsvg ?= "2.6.5"
PREFERRED_VERSION_libgpewidget ?= "0.103"
PREFERRED_VERSION_libgpepimc ?= "0.5"
PREFERRED_VERSION_libgpevtype ?= "0.12"
PREFERRED_VERSION_libschedule ?= "0.15"
PREFERRED_VERSION_gpe-icons ?= "0.24"
PREFERRED_VERSION_libgsm ?= "1.0.10"
PREFERRED_VERSION_diet-x11 ?= "6.2.1"
PREFERRED_VERSION_xproto ?= "6.6.2"
PREFERRED_VERSION_xcursor-transparent-theme ?= "0.1.1"
PREFERRED_VERSION_rxvt-unicode ?= "5.3"
PREFERRED_VERSION_gtk2-theme-angelistic ?= "0.3"
PREFERRED_VERSION_xst ?= "0.14"
#PREFERRED_VERSION_xextensions ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_xprop ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_xhost ?= "0.0cvs20040413"
PREFERRED_VERSION_xrdb ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_gpe-login ?= "0.81"
PREFERRED_VERSION_gpe-session-scripts ?= "0.63"
PREFERRED_VERSION_gpe-soundserver ?= "0.4-1"
PREFERRED_VERSION_gpe-todo ?= "0.54"
PREFERRED_VERSION_gpe-calendar ?= "0.63"
PREFERRED_VERSION_gpe-sketchbox ?= "0.2.8"
PREFERRED_VERSION_gpe-contacts ?= "0.41"
PREFERRED_VERSION_gpe-today ?= "0.08"
PREFERRED_VERSION_matchbox-panel-manager ?= "0.1"
PREFERRED_VERSION_dbus ?= "0.23.4"
PREFERRED_VERSION_gpe-bluetooth ?= "0.43"
PREFERRED_VERSION_gpe-su ?= "0.18"
PREFERRED_VERSION_gpe-conf ?= "0.1.25"
PREFERRED_VERSION_gpe-clock ?= "0.21"
PREFERRED_VERSION_gpe-mininet ?= "0.5"
PREFERRED_VERSION_gpe-mixer ?= "0.42"
PREFERRED_VERSION_gpe-shield ?= "0.7"
PREFERRED_VERSION_gpe-wlancfg ?= "0.2.6"
PREFERRED_VERSION_gpe-taskmanager ?= "0.18"
PREFERRED_VERSION_keylaunch ?= "2.0.7"
PREFERRED_VERSION_minilite ?= "0.49"
PREFERRED_VERSION_xmonobut ?= "0.4"
PREFERRED_VERSION_gpe-edit ?= "0.29"
PREFERRED_VERSION_gpe-gallery ?= "0.96"
PREFERRED_VERSION_gpe-calculator ?= "0.2"
PREFERRED_VERSION_gpe-package ?= "0.3"
PREFERRED_VERSION_gpe-soundbite ?= "1.0.6"
PREFERRED_VERSION_gpe-terminal ?= "1.1"
PREFERRED_VERSION_gpe-watch ?= "0.10"
PREFERRED_VERSION_gpe-what ?= "0.33"
PREFERRED_VERSION_gpe-filemanager ?= "0.23"
PREFERRED_VERSION_gpe-go ?= "0.05"
PREFERRED_VERSION_gpe-irc ?= "0.07"
PREFERRED_VERSION_gpe-lights ?= "0.13"
#PREFERRED_VERSION_gpe-nmf ?= "0.21"
PREFERRED_VERSION_gpe-othello ?= "0.2-1"
PREFERRED_VERSION_gpe-plucker ?= "0.2"
PREFERRED_VERSION_gpe-tetris ?= "0.6-4"
PREFERRED_VERSION_gsoko ?= "0.4.2-gpe6"
PREFERRED_VERSION_xdemineur ?= "2.1.1"
PREFERRED_VERSION_matchbox-panel-hacks ?= "0.3-1"
PREFERRED_VERSION_rosetta ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_dillo2 ?= "0.6.6"
PREFERRED_VERSION_minimo ?= "0.0cvs${CVSDATE_minimo}"
PREFERRED_VERSION_linphone-hh ?= "0.12.2.hh1"
PREFERRED_VERSION_linphone ?= "0.12.2"
PREFERRED_VERSION_firefox ?= "1.0"
PREFERRED_VERSION_gpe_mini_browser ?= "0.15"

View File

@ -0,0 +1,84 @@
#
# Specify which versions of GPE (and related) applications to build
#
#work around some breakages
#CVSDATE_xext=20050222
#set some preferred providers:
PREFERRED_PROVIDER_gconf=gconf-dbus
PREFERRED_PROVIDER_gnome-vfs=gnome-vfs-dbus
#specify versions, as the header says :)
#PREFERRED_VERSION_libmatchbox ?= "1.6"
#PREFERRED_VERSION_matchbox ?= "0.9.1"
#PREFERRED_VERSION_matchbox-common ?= "0.9.1"
#PREFERRED_VERSION_matchbox-desktop ?= "0.9.1"
#PREFERRED_VERSION_matchbox-wm ?= "0.9.3"
#PREFERRED_VERSION_matchbox-panel ?= "0.9.1"
#PREFERRED_VERSION_matchbox-applet-inputmanager ?= "0.6"
#PREFERRED_VERSION_gtk+ ?= "2.6.3"
#PREFERRED_VERSION_libgpewidget ?= "0.97"
#PREFERRED_VERSION_libgpepimc ?= "0.4"
#PREFERRED_VERSION_libgpevtype ?= "0.11"
#PREFERRED_VERSION_libschedule ?= "0.14"
#PREFERRED_VERSION_gpe-icons ?= "0.24"
#PREFERRED_VERSION_libgsm ?= "1.0.10"
PREFERRED_VERSION_diet-x11 ?= "6.2.1"
PREFERRED_VERSION_xproto ?= "6.6.2"
#PREFERRED_VERSION_xcursor-transparent-theme ?= "0.1.1"
PREFERRED_VERSION_rxvt-unicode ?= "5.3"
#PREFERRED_VERSION_gtk2-theme-angelistic ?= "0.3"
#PREFERRED_VERSION_xst ?= "0.14"
PREFERRED_VERSION_xextensions ?= "1.0.1"
PREFERRED_VERSION_xprop ?= "0.0cvs${CVSDATE}"
PREFERRED_VERSION_xhost ?= "0.0cvs20040413"
PREFERRED_VERSION_xrdb ?= "0.0cvs${CVSDATE}"
#PREFERRED_VERSION_gpe-login ?= "0.76"
#PREFERRED_VERSION_gpe-session-scripts ?= "0.62"
#PREFERRED_VERSION_gpe-soundserver ?= "0.4-1"
#PREFERRED_VERSION_gpe-todo ?= "0.50"
#PREFERRED_VERSION_gpe-calendar ?= "0.61"
#PREFERRED_VERSION_gpe-sketchbox ?= "0.2.8"
#PREFERRED_VERSION_gpe-contacts ?= "0.36"
#PREFERRED_VERSION_gpe-today ?= "0.08"
#PREFERRED_VERSION_matchbox-panel-manager ?= "0.1"
PREFERRED_VERSION_dbus ?= "0.23.4"
#PREFERRED_VERSION_gpe-bluetooth ?= "0.38"
#PREFERRED_VERSION_gpe-su ?= "0.18"
#PREFERRED_VERSION_gpe-conf ?= "0.1.23"
#PREFERRED_VERSION_gpe-clock ?= "0.20"
#PREFERRED_VERSION_gpe-mininet ?= "0.5"
#PREFERRED_VERSION_gpe-mixer ?= "0.40"
#PREFERRED_VERSION_gpe-shield ?= "0.7"
#PREFERRED_VERSION_gpe-wlancfg ?= "0.2.6"
#PREFERRED_VERSION_gpe-taskmanager ?= "0.17"
#PREFERRED_VERSION_keylaunch ?= "2.0.7"
#PREFERRED_VERSION_minilite ?= "0.47"
#PREFERRED_VERSION_xmonobut ?= "0.4"
#PREFERRED_VERSION_gpe-edit ?= "0.28"
#PREFERRED_VERSION_gpe-gallery ?= "0.95"
#PREFERRED_VERSION_gpe-calculator ?= "0.2"
#PREFERRED_VERSION_gpe-package ?= "0.2"
#PREFERRED_VERSION_gpe-soundbite ?= "1.0.5"
#PREFERRED_VERSION_gpe-terminal ?= "1.1"
#PREFERRED_VERSION_gpe-watch ?= "0.10"
#PREFERRED_VERSION_gpe-what ?= "0.33"
#PREFERRED_VERSION_gpe-filemanager ?= "0.20"
#PREFERRED_VERSION_gpe-go ?= "0.05"
#PREFERRED_VERSION_gpe-irc ?= "0.07"
#PREFERRED_VERSION_gpe-lights ?= "0.13"
#PREFERRED_VERSION_gpe-nmf ?= "0.19"
#PREFERRED_VERSION_gpe-othello ?= "0.2-1"
#PREFERRED_VERSION_gpe-plucker ?= "0.2"
#PREFERRED_VERSION_gpe-tetris ?= "0.6-2"
#PREFERRED_VERSION_gsoko ?= "0.4.2-gpe6"
#PREFERRED_VERSION_xdemineur ?= "2.1.1"
#PREFERRED_VERSION_matchbox-panel-hacks ?= "0.3-1"
PREFERRED_VERSION_rosetta ?= "0.0cvs${CVSDATE}"
#PREFERRED_VERSION_dillo2 ?= "0.6.6"
PREFERRED_VERSION_minimo ?= "0.0cvs${CVSDATE_minimo}"
#PREFERRED_VERSION_linphone-hh ?= "0.12.2.hh1"
#PREFERRED_VERSION_linphone ?= "0.12.2"
#PREFERRED_VERSION_firefox ?= "1.0"

View File

@ -0,0 +1,192 @@
#
# A list of applications.
#
PREFERRED_VERSION_libopie2 = "${OPIE_VERSION}"
PREFERRED_VERSION_libopieobex0 = "${OPIE_VERSION}"
PREFERRED_VERSION_libopietooth1 = "${OPIE_VERSION}"
PREFERRED_VERSION_libqpe-opie = "${OPIE_VERSION}"
PREFERRED_VERSION_libqtaux2 = "${OPIE_VERSION}"
PREFERRED_VERSION_libmailwrapper = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-aboutapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-addressbook = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-advancedfm = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-alarm = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-appearance = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-aqpkg = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-autorotateapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-backgammon = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-backup = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-bartender = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-batteryapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-bluepin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-bluetoothapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-bluetoothmanager = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-bounce = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-brightnessapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-button-settings = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-buzzword = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-calculator = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-calibrate = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-camera = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-cardmon = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-checkbook = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-citytime = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-clipboardapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-clockapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-clock = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-confeditor = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-console = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-dagger = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-dasher = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-datebook = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-datebook-chrisholidayplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-datebook-nationalholidayplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-datebook-birthdayplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-deco-flat = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-deco-liquid = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-deco-polished = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-doctab = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-drawpad = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-dvorak = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-embeddedkonsole = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-euroconv = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-examples-python = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-eye = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-fifteen = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-formatter = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-freetype = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-ftplib = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-ftp = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-go = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-gutenbrowser = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-handwriting = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-helpbrowser = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-help-en = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-homeapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-i18n = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-lrelease-native = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-lupdate-native = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-irc = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-irdaapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-jumpx = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-kbill = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-kcheckers = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-keyboard = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-keypebble = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-keytabs = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-keyview = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-kjumpx = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-kpacman = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-language = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-launcher-settings = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-libqrsync = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-light-and-power = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-login = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-logoutapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mailapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mail = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer1-libmadplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer1-libmodplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer1-libtremorplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer1-libwavplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer1 = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer2 = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer2-skin-default = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer2-skin-default-landscape = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer2-skin-pod = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediaplayer2-skin-techno = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mediummount = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-memoryapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mindbreaker = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-minesweep = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-mobilemsg = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-multikeyapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-multikey = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-networkapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-networksettings = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-notesapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-odict = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-osearch = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-oxygen = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-oyatzee = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-packagemanager = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-parashoot = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-pcmciaapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-performance = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-pickboard = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-pics = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-pimconverter = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-powerchord = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-pyquicklaunchapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-qasteroids = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-qcop = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-quicklauncher = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-qss = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-rdesktop = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-reader = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-recorder = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-remote = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-restartapplet2 = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-restartapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-restartapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-rotateapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-screenshotapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-search = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-security = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-securityplugin-blueping = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-securityplugin-dummy = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-securityplugin-notice = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-securityplugin-pin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-sfcave = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-sheet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-sh = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-showimg = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-snake = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-solitaire = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-sshkeys = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-style-flat = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-style-fresh = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-style-liquid = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-style-metal = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-style-phase = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-style-web = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-sounds = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-suspendapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-symlinker = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-sysinfo = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-systemtime = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-tableviewer = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-tabmanager = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-taskbar = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-tetrix = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-textedit = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-theme = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-tictac = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-tinykate = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today-addressbookplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today-datebookplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today-fortuneplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today-mailplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today-stocktickerplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today-todolistplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-today-weatherplugin = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-todo = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-tonleiter = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-unikeyboard = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-usermanager = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-vmemo = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-vmemo-settings = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-volumeapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-vtapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-wellenreiter = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-wirelessapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-wordgame = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-write = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-zkbapplet = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-zlines = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-zsafe = "${OPIE_VERSION}"
PREFERRED_VERSION_opie-zsame = "${OPIE_VERSION}"
PREFERRED_VERSION_konqueror-embedded = "20030705"

View File

@ -0,0 +1,75 @@
# this file holds documentation for known keys, possible values and their meaning
# feel free to send updates and corrections to oe@handhelds.org
# conf/bitbake.conf
PREFERRED_VERSION[doc] = "Normally used as PREFERRED_VERSION_package-name = \"version\" to set the preferred \
version when more than one version of the package-name is available."
BUILD_ARCH[doc] = "The name of the building architecture. For example i686"
BUILD_OS[doc] = "The operating system (in lower case) of the building architecture. For example \
linux"
BUILD_VENDOR[doc] = "FIXME"
BUILD_SYS[doc] = "FIXME"
BUILD_PREFIX[doc] = "FIXME"
BUILD_CC_ARCH[doc] = "FIXME"
HOST_ARCH[doc] = "The name of the target architecture. Normally same as the TARGET_ARCH @see TARGET_ARCH @group base"
HOST_OS[doc] = "The name of the target operating system. Normally the same as the TARGET_OS \
@see TARGET_OS @group base"
HOST_VENDOR[doc] = "The name of the vendor. Normally same as the TARGET_VENDOR @see TARGET_VENDOR"
HOST_SYS[doc] = "FIXME"
HOST_PREFIX[doc] = "Normally same as the TARGET_PREFIX @see TARGET_PREFIX @group base"
HOST_CC_ARCH[doc] = "Normally same as the TARGET_CC_ARCH @see TARGET_CC_ARCH @group base"
TARGET_ARCH[doc] = "Build for which architecture. Examples are arm, i686, sh3, mips, powerpc"
TARGET_OS[doc] = "Build for which Operating System. Currently possible values are \
linux and linux-uclibc. When using linux-uclibc you might want to set USE_NLS_gcc-cross to \"no\"."
TARGET_VENDOR[doc] = "FIXME"
TARGET_SYS[doc] = "The target system is composed out of TARGET_ARCH,TARGET_VENDOR and TARGET_OS"
TARGET_PREFIX[doc] = "The prefix for the cross compile toolchain. For example arm-linux-"
TARGET_CC_ARCH[doc] = "FIXME"
TARGET_FPU[doc] = "Floating point option (mostly for FPU-less systems), can be 'soft' or empty"
PACKAGE_ARCH[doc] = "The architecture needed for using a resulting package. If you have \
machine dependent configuration options in your bitbake file add a \
PACKAGE_ARCH = \"${MACHINE_ARCH}\" to the file"
DATE[doc] = "The date the build was started Ymd"
TIME[doc] = "The time the build was started HMS"
DATETIME[doc] = "The date and time the build was started at"
PN[doc] = "PN holds the name of the package (Package Name). It is gathered from the bitbake-file filename"
PV[doc] = "PV holds the version of the package (Package Version). The default value is 1.0, it is \
retrieved from the bitbake-file filename."
PR[doc] = "PR is the revision of the package (Package Revision). By default it is set to r0"
PF[doc] = "Package name - Package version - Package revision (PN-PV-PR)"
P[doc] = "Package name - Package version (PN-PV)"
S[doc] = "Directory that holds the sources to build, WORKDIR/PN-PV by default. The 'make' or equivalent command is run from here"
SECTION[doc] = "Section of the packages e.g. console/networking"
PRIORITY[doc] = "Importance of package, default values are 'optional' or 'needed'"
DESCRIPTION[doc] = "A small description of the package"
LICENSE[doc] = "The license of the resulting package e.g. GPL"
AUTHOR[doc] = "The author of the stuff contained in the package"
MAINTAINER[doc] = "Maintainer of the .bb and the resulting package"
HOMEPAGE[doc] = "Homepage of the package e.g. http://www.project.net"
DEPENDS[doc] = "Build time dependencies, things needed to build the package"
RDEPENDS[doc] = "Run time dependencies, things needed for a given package to run"
PROVIDES[doc] = "Names for additional dependencies that this package will provide"
PACKAGES[doc] = "Names of packages to be generated"
FILES[doc] = "Files/Directories belonging to the package"
TMPDIR[doc] = "The temporary directory of openembedded holding work-, deploy, staging- and some more dirs"
CACHE[doc] = "The directory holding the parsing cache."
DL_DIR[doc] = "The directory where tarballs will be downloaded."
CVSDIR[doc] = "The directory where cvs checkouts will be stored in."
STAMP[doc] = "The directory that holds files to keep track of what was built"
WORKDIR[doc] = "The directory where a concrete package will be unpacked and built"
T[doc] = "Temporary directory within the WORKDIR"

View File

@ -0,0 +1,5 @@
SRC_DISTRIBUTE_LICENSES += "GPL GPLv2 BSD LGPL Apache-2.0 QPL AFL"
SRC_DISTRIBUTE_LICENSES += "MIT Sleepycat Classpath Perl PSF PD Artistic"
SRC_DISTRIBUTE_LICENSES += "bzip2 zlib ntp cron libpng netperf openssl"
SRC_DISTRIBUTE_LICENSES += "Info-ZIP tcp-wrappers"

View File

@ -0,0 +1,122 @@
#
# OpenEmbedded local configuration file (sample)
#
# Please visit the Wiki at http://openembedded.org/ for more info.
#
#
# Be SURE to read this file in its entirety and the GettingStarted page on the
# wiki before proceeding.
#
# Once you have done that, remove the line at the end of this
# file and build away.
#
# WARNING: lines starting with a space (' ') will result in parse failures.
# Remove '# ' from commented lines to activate them.
#
# NOTE: Do NOT use $HOME in your paths, BitBake does NOT expand ~ for you. If you
# must have paths relative to your homedir use ${HOME} (note the {}'s there
# you MUST have them for the variable expansion to be done by BitBake). Your
# paths should all be absolute paths (They should all start with a / after
# expansion. Stuff like starting with ${HOME} or ${TOPDIR} is ok).
# Use this to specify where BitBake should place the downloaded sources into
DL_DIR = "${HOME}/sources"
# Delete the line below. Then specify which .bb files to consider for
# your build. Typically this will be something like BBFILES = "/path/to/openembedded/packages/*/*.bb"
BBFILES := "${@bb.fatal('Edit your conf/local.conf: BBFILES')}"
# Use the BBMASK below to instruct BitBake to _NOT_ consider some .bb files
# This is a regular expression, so be sure to get your parenthesis balanced.
BBMASK = ""
# Uncomment this if you want to use a prebuilt toolchain. You will need to
# provide packages for toolchain and additional libraries yourself. You also
# have to set PATH in your environment to make sure BitBake finds additional binaries.
# Note: You will definitely need to say:
# ASSUME_PROVIDED = "virtual/arm-linux-gcc-2.95"
# to build any of two Linux 2.4 Embedix kernels,
# i.e. openzaurus-sa-2.4.18 and openzaurus-pxa-2.4.18 - and don't forget
# to rename the binaries as instructed in the Wiki.
# ASSUME_PROVIDED = "virtual/${TARGET_PREFIX}gcc virtual/libc"
# Select between multiple alternative providers, if more than one is eligible.
PREFERRED_PROVIDERS = "virtual/qte:qte virtual/libqpe:libqpe-opie"
PREFERRED_PROVIDERS += " virtual/libsdl:libsdl-qpe"
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc-initial:gcc-cross-initial"
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}gcc:gcc-cross"
PREFERRED_PROVIDERS += " virtual/${TARGET_PREFIX}g++:gcc-cross"
# Uncomment this to specify where BitBake should create its temporary files.
# Note that a full build of everything in OpenEmbedded will take GigaBytes of hard
# disk space, so make sure to free enough space. The default TMPDIR is
# <build directory>/tmp
# Don't use symlinks in the path to avoid problems
# TMPDIR = /usr/local/projects/oetmp
# Uncomment this to specify a machine to build for. See the conf directory
# for machines currently known to OpenEmbedded.
# MACHINE = "collie"
# Use this to specify the target architecture. Note that this is only
# needed when building for a machine not known to OpenEmbedded. Better use
# the MACHINE attribute (see above)
# TARGET_ARCH = "arm"
# Use this to specify the target operating system. The default is "linux",
# for a normal linux system with glibc. Set this to "linux-uclibc" if you want
# to build a uclibc based system.
# TARGET_OS = "linux"
# TARGET_OS = "linux-uclibc"
# Uncomment this to select a distribution policy. See the conf directory
# for distributions currently known to OpenEmbedded.
# DISTRO = "familiar"
# Uncomment this to select a particular kernel version if supported by
# your MACHINE setting. Currently only supported on Zaurus Clamshells.
# KERNEL_VERSION = "2.6"
# Uncomment one of these to build packages during the build process.
# This is done automatically if you set DISTRO (see above)
# INHERIT = "package_ipk"
# INHERIT = "package_tar"
# Add the required image file system types below. Valid are jffs2, tar, cramfs
IMAGE_FSTYPES = "jffs2 tar"
# Uncomment this to disable the parse cache (not recommended).
# CACHE = ""
# Uncomment this if you want BitBake to emit debugging output
# BBDEBUG = "yes"
# Uncomment these two if you want BitBake to build images useful for debugging.
# DEBUG_BUILD = "1"
# INHIBIT_PACKAGE_STRIP = "1"
# Uncomment these to build a package such that you can use gprof to profile it.
# NOTE: This will only work with 'linux' targets, not
# 'linux-uclibc', as uClibc doesn't provide the necessary
# object files. Also, don't build glibc itself with these
# flags, or it'll fail to build.
#
# PROFILE_OPTIMIZATION = "-pg"
# SELECTED_OPTIMIZATION = "${PROFILE_OPTIMIZATION}"
# LDFLAGS =+ "-pg"
# Uncomment this to enable parallel make.
# This allows make to spawn multiple processes to take advantage of multiple
# processors. Useful on SMP machines. This may break some packages - we're
# in the process of marking these so let us know if you find any.
# PARALLEL_MAKE = "-j 4"
# Uncomment this if you want BitBake to emit the log if a build fails.
BBINCLUDELOGS = "yes"
# Specifies a location to search for pre-generated tarballs when fetching
# a cvs:// URI. Comment this out if you always want to pull directly from CVS.
CVS_TARBALL_STASH = "http://www.oesources.org/source/current/"
# EDIT THIS FILE and then remove the line below before using!
REMOVE_THIS_LINE:="${@oe.fatal('Read the comments in your conf/local.conf')}"

View File

@ -0,0 +1,12 @@
#@TYPE: Machine
#@NAME: Sharp Zaurus SL-C7x0
#@DESCRIPTION: Machine configuration for the Sharp Zaurus SL-C700, Sharp Zaurus SL-C750, Sharp Zaurus SL-C760, Sharp Zaurus SL-C860 devices
KERNEL_VERSION ?= "2.6"
include conf/machine/zaurus-clamshell.conf
include conf/machine/zaurus-clamshell-${KERNEL_VERSION}.conf
ROOT_FLASH_SIZE = "32"
# yes, the husky has 64MB, I know... Be happy, that way you can install more software :)

View File

@ -0,0 +1,24 @@
HANDHELD_MODULES = "ipv6 \
ipsec \
nvrd \
mip6-mn \
tun \
ide-cs ide-disk ide-probe-mod \
loop \
vfat ext2 \
sunrpc nfs \
btuart-cs dtl1-cs bt3c-cs rfcomm bnep l2cap sco hci_uart \
pcnet-cs serial-cs airo-cs \
af_packet \
ppp-async ppp-deflate ppp-mppe \
ip-gre ip-tables ipip \
irda irlan irnet irport irtty ircomm-tty \
input uinput \
"
# Configuration bits for "generic handheld"
BOOTSTRAP_EXTRA_DEPENDS += "pcmcia-cs apmd ppp wireless-tools irda-utils openswan wpa-supplicant-nossl lrzsz ppp-dialin scap network-suspend-scripts"
BOOTSTRAP_EXTRA_RDEPENDS += "pcmcia-cs apm apmd network-suspend-scripts"
BOOTSTRAP_EXTRA_RRECOMMENDS += "ppp wireless-tools irda-utils openswan wpa-supplicant-nossl lrzsz ppp-dialin scap ${@linux_module_packages('${HANDHELD_MODULES}', d)}"
INHERIT += "linux_modules"

View File

@ -0,0 +1,9 @@
IPAQ_MODULES = "apm h3600_generic_sleeve ipaq-sleeve ipaq-mtd-asset nvrd atmelwlandriver sa1100-rtc ipaq-hal h3600_ts usb-eth wavelan_cs keybdev"
BOOTSTRAP_EXTRA_DEPENDS += "kbdd bl hostap-modules orinoco-modules atmelwlandriver"
BOOTSTRAP_EXTRA_DEPENDS_append_kernel24 += "mipv6"
BOOTSTRAP_EXTRA_RRECOMMENDS += "kbdd bl hostap-modules-cs orinoco-modules-cs atmelwlandriver ${@linux_module_packages('${IPAQ_MODULES}', d)}"
BOOTSTRAP_EXTRA_RRECOMMENDS_append_kernel24 += "mipv6"
PREFERRED_PROVIDER_virtual/xserver = "xserver-kdrive"

View File

@ -0,0 +1,33 @@
#@TYPE: Machine
#@NAME: iPAQ hx47xx hx2xxx
#@DESCRIPTION: Machine configuration for the iPAQ with a pxa27x CPU devices
TARGET_ARCH = "arm"
IPKG_ARCHS = "all arm armv4 armv5te ipaqpxa hx4700"
PREFERRED_PROVIDER_virtual/kernel = "handhelds-pxa-2.6"
PREFERRED_VERSION_orinoco-modules = "0.15rc1"
BOOTSTRAP_EXTRA_DEPENDS = "ipaq-boot-params"
BOOTSTRAP_EXTRA_DEPENDS_append = " handhelds-pxa-2.6 udev modutils-collateral module-init-tools"
BOOTSTRAP_EXTRA_RDEPENDS = "kernel ipaq-boot-params ${@linux_module_packages('${PXA270_MODULES}', d)}"
BOOTSTRAP_EXTRA_RDEPENDS_append = " udev modutils-collateral module-init-tools"
PXA270_MODULES = "g_ether pxa27x_udc"
SERIAL_CONSOLE = "115200 ttyS0"
include conf/machine/ipaq-common.conf
include conf/machine/handheld-common.conf
GUI_MACHINE_CLASS = "bigscreen"
GPE_EXTRA_DEPENDS += "gaim sylpheed gpe-filemanager gpe-nmf evince"
GPE_EXTRA_INSTALL += "gaim sylpheed gpe-filemanager gpe-nmf evince"
# Use tune-xscale per default. Machine independent feeds should be built with tune-strongarm.
include conf/machine/tune-xscale.conf
BOOTSTRAP_EXTRA_RDEPENDS += "${@linux_module_packages('${HX4700_MODULES}', d)}"
HX4700_MODULES = "asic3_base asic3_mmc hx4700_bt hx4700_core hx4700_leds hx4700_navpt hx4700_pcmcia \
hx4700_ts hx4700_wlan snd_pxa_i2sound_hx4700"

View File

@ -0,0 +1,8 @@
#@TYPE: Machine
#@NAME: HP Jornada 7xx
#@DESCRIPTION: Machine configuration for the SA1100 based HP Jornada 7xx palmtop computer
TARGET_ARCH = "arm"
IPKG_ARCHS = "all arm ${MACHINE}"
include conf/machine/tune-strongarm.conf

View File

@ -0,0 +1,58 @@
#@TYPE: Machine
#@NAME: Nokia 770 internet tablet
#@DESCRIPTION: Machine configuration for the Nokia 770, an omap 1710 based tablet
TARGET_ARCH = "arm"
IPKG_ARCHS = "all arm armv4 armv5te"
PREFERRED_PROVIDER_virtual/xserver = "xserver-kdrive-omap"
PREFERRED_PROVIDER_virtual/bootloader = ""
XSERVER = "xserver-kdrive-omap"
# 800x480 is big enough for me
GUI_MACHINE_CLASS = "bigscreen"
GPE_EXTRA_DEPENDS += "gaim sylpheed gpe-mini-browser tscalib"
GPE_EXTRA_INSTALL += "gaim sylpheed gpe-mini-browser tscalib"
# Use tune-arm926 per default. Machine independent feeds should be built with tune-strongarm.
#include conf/machine/tune-arm926ejs.conf
#size of the root partition (yes, it is 123 MB)
ROOT_FLASH_SIZE = "123"
EXTRA_IMAGECMD_jffs2_nokia770 = "--pad --little-endian --eraseblock=0x20000 --no-cleanmarkers"
# serial console port on devboard rev. B3
#SERIAL_CONSOLE = "115200 tts/0"
SERIAL_CONSOLE = "115200 ttyS0"
# Build omap1 until Nokia releases the patches
PREFERRED_PROVIDER_virtual/kernel = "linux-omap1"
BOOTSTRAP_EXTRA_DEPENDS += "virtual/kernel sysfsutils nokia770-init \
pcmcia-cs apmd ppp wireless-tools console-tools openswan wpa-supplicant-nossl lrzsz ppp-dialin"
BOOTSTRAP_EXTRA_RDEPENDS += "sysfsutils nokia770-init \
pcmcia-cs apm ppp wireless-tools console-tools"
# NFS Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-nfs kernel-module-lockd kernel-module-sunrpc"
# Crypto Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-des kernel-module-md5"
# SMB and CRAMFS
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-smbfs kernel-module-cramfs"
# Serial Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-8250 "
# Bluetooth Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-bluetooth kernel-module-l2cap kernel-module-rfcomm kernel-module-hci-vhci \
# kernel-module-bnep kernel-module-hidp kernel-module-hci-uart kernel-module-sco"
# PPP Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-ppp-generic kernel-module-ppp-async"
#USB Gadget Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-gadgetfs kernel-module-g-file-storage kernel-module-g-serial \
# kernel-module-g-ether"
include conf/machine/handheld-common.conf

View File

@ -0,0 +1,20 @@
#@TYPE: Machine
#@NAME: Sharp Zaurus SL-C3000
#@DESCRIPTION: Machine configuration for the Sharp Zaurus SL-C3000 device
KERNEL_VERSION ?= "2.4"
include conf/machine/zaurus-clamshell.conf
include conf/machine/zaurus-clamshell-${KERNEL_VERSION}.conf
PIVOTBOOT_EXTRA_DEPENDS += "pivotinit pcmcia-cs"
PIVOTBOOT_EXTRA_RDEPENDS += "pivotinit pcmcia-cs"
PIVOTBOOT_EXTRA_RRECOMMENDS += ""
# Useful things for the built-in hard disk
BOOTSTRAP_EXTRA_DEPENDS += "hdparm e2fsprogs"
BOOTSTRAP_EXTRA_RDEPENDS += "hdparm e2fsprogs e2fsprogs-e2fsck e2fsprogs-mke2fs"
IMAGE_FSTYPES = "jffs2 tar"
ROOT_FLASH_SIZE = "100"

View File

@ -0,0 +1,2 @@
TARGET_CC_ARCH = "-march=armv4t -mtune=arm920t"

View File

@ -0,0 +1,2 @@
TARGET_CC_ARCH = "-march=armv5te -mtune=arm926ej-s"
PACKAGE_ARCH = "armv5te"

View File

@ -0,0 +1 @@
TARGET_CC_ARCH = "-mcpu=arm9tdmi -mtune=arm9tdmi"

View File

@ -0,0 +1,2 @@
TARGET_CC_ARCH = "-march=armv4 -mtune=xscale"

View File

@ -0,0 +1,3 @@
TARGET_CC_ARCH = "-march=armv5te -mtune=xscale"
PACKAGE_ARCH = "armv5te"

View File

@ -0,0 +1,48 @@
# Zaurus Clamshell specific configuration for kernel 2.4
# Don't forget to include zaurus-clamshell.conf as well
PREFERRED_PROVIDERS_append_c7x0 = " virtual/kernel:c7x0-kernels-2.4-embedix"
PREFERRED_PROVIDERS_append_akita = " virtual/kernel:openzaurus-pxa27x"
PREFERRED_PROVIDERS_append_spitz = " virtual/kernel:openzaurus-pxa27x"
PREFERRED_PROVIDERS_append_borzoi = " virtual/kernel:openzaurus-pxa27x"
# Later versions are broken on kernel 2.4
PREFERRED_VERSION_orinoco-modules = "0.13e"
BOOTSTRAP_EXTRA_DEPENDS += "virtual/kernel hostap-modules orinoco-modules sharp-sdmmc-support \
pcmcia-cs apmd wireless-tools console-tools irda-utils lrzsz hostap-utils prism3-firmware prism3-support \
wpa-supplicant-nossl ppp ppp-dialin openswan"
BOOTSTRAP_EXTRA_RDEPENDS += "kernel hostap-modules-cs orinoco-modules-cs sharp-sdmmc-support \
pcmcia-cs apmd apm wireless-tools console-tools irda-utils hostap-utils prism3-firmware prism3-support \
wpa-supplicant-nossl ppp ppp-dialin openswan"
# Misc Modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-net-fd kernel-module-registers"
# USB Client Modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-usbdcore kernel-module-usbdmonitor"
# Bluetooth Modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-bluez kernel-module-l2cap kernel-module-rfcomm kernel-module-hci-vhci \
kernel-module-bnep kernel-module-hidp kernel-module-hci-uart kernel-module-sco \
kernel-module-bluecard-cs kernel-module-btuart-cs kernel-module-dtl1-cs \
kernel-module-firmware-class kernel-module-bt3c-cs"
# BI Module
BOOTSTRAP_EXTRA_RDEPENDS_append_c7x0 = " kernel-module-pxa-bi"
BOOTSTRAP_EXTRA_RDEPENDS_append_spitz = " kernel-module-pxa27x-bi"
BOOTSTRAP_EXTRA_RDEPENDS_append_akita = " kernel-module-pxa27x-bi"
BOOTSTRAP_EXTRA_RDEPENDS_append_borzoi = " kernel-module-pxa27x-bi"
# USB Host Modules
BOOTSTRAP_EXTRA_RDEPENDS_append_spitz = " kernel-module-usbcore kernel-module-usbmouse kernel-module-usb-monitor \
kernel-module-usbkbd kernel-module-usb-storage kernel-module-hci-usb \
kernel-module-usb-ohci-pxa27x kernel-module-bfusb"
BOOTSTRAP_EXTRA_RDEPENDS_append_akita = " kernel-module-usbcore kernel-module-usbmouse kernel-module-usb-monitor \
kernel-module-usbkbd kernel-module-usb-storage kernel-module-hci-usb \
kernel-module-usb-ohci-pxa27x kernel-module-bfusb"
BOOTSTRAP_EXTRA_RDEPENDS_append_borzoi = " kernel-module-usbcore kernel-module-usbmouse kernel-module-usb-monitor \
kernel-module-usbkbd kernel-module-usb-storage kernel-module-hci-usb \
kernel-module-usb-ohci-pxa27x kernel-module-bfusb"

View File

@ -0,0 +1,36 @@
# Zaurus Clamshell specific configuration for kernel 2.6
# Don't forget to include zaurus-clamshell.conf as well
PREFERRED_PROVIDER_virtual/kernel = "linux-openzaurus"
PREFERRED_VERSION_orinoco-modules = "0.15rc1"
BOOTSTRAP_EXTRA_DEPENDS += "virtual/kernel udev sysfsutils hostap-modules orinoco-modules \
pcmcia-cs apmd wireless-tools console-tools irda-utils lrzsz hostap-utils prism3-firmware prism3-support \
ppp ppp-dialin openswan wpa-supplicant-nossl"
BOOTSTRAP_EXTRA_RDEPENDS += "kernel udev sysfsutils hostap-modules-cs orinoco-modules-cs \
pcmcia-cs apm wireless-tools irda-utils udev-utils console-tools hostap-utils prism3-firmware prism3-support \
ppp ppp-dialin openswan wpa-supplicant-nossl"
# Ethernet modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-pcnet-cs"
# NFS Modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-nfs kernel-module-lockd kernel-module-sunrpc"
# Crypto Modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-des kernel-module-md5"
# SMB and CRAMFS
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-smbfs kernel-module-cramfs"
# Serial Modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-8250 kernel-module-serial-cs"
# Bluetooth Modules
BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-bluetooth kernel-module-l2cap kernel-module-rfcomm kernel-module-hci-vhci \
kernel-module-bnep kernel-module-hidp kernel-module-hci-uart kernel-module-sco \
kernel-module-bt3c-cs kernel-module-bluecard-cs kernel-module-btuart-cs kernel-module-dtl1-cs"
# Infrared Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-pxaficp-ir kernel-module-irda kernel-module-ircomm
# kernel-module-ircomm-tty kernel-module-irlan kernel-module-irnet kernel-module-ir-usb"
# USB Gadget Modules
#BOOTSTRAP_EXTRA_RDEPENDS += "kernel-module-gadgetfs kernel-module-g-file-storage \
# kernel-module-g-serial kernel-module-g-ether"

Some files were not shown because too many files have changed in this diff Show More