classes: Sync with OE

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@651 311d38ba-8fff-0310-9ca6-ca027cbcb966
Richard Purdie 2006-08-27 16:01:33 +00:00
parent 37d03b1b00
commit 93a8d0662e
12 changed files with 155 additions and 77 deletions

View File

@@ -64,7 +64,7 @@ def base_set_filespath(path, d):
overrides = overrides + ":"
for o in overrides.split(":"):
filespath.append(os.path.join(p, o))
bb.data.setVar("FILESPATH", ":".join(filespath), d)
return ":".join(filespath)
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
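The hunk above makes base_set_filespath() return the joined search path, which the FILESPATH assignment that follows uses directly, instead of setting the variable itself. A minimal sketch of the search order the function builds, using made-up values for FILE_DIRNAME, PF, P, PN and OVERRIDES rather than anything from a real build:

import os

# hypothetical inputs: FILE_DIRNAME=/oe/packages/foo, PF=foo-1.0-r0, P=foo-1.0, PN=foo
paths = ["/oe/packages/foo/foo-1.0-r0", "/oe/packages/foo/foo-1.0",
         "/oe/packages/foo/foo", "/oe/packages/foo/files", "/oe/packages/foo"]
overrides = "local:arm:angstrom:".split(":")   # the trailing ":" added above yields a bare "" entry

# each directory is searched with every override suffix first, then bare
filespath = [os.path.join(p, o) for p in paths for o in overrides]
print(":".join(filespath))                     # what now ends up in FILESPATH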
@@ -187,7 +187,7 @@ oe_libinstall() {
dir=`pwd`
fi
dotlai=$libname.lai
dir=$dir`(cd $dir; find -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`
olddir=`pwd`
__runcmd cd $dir
@@ -413,10 +413,10 @@ def oe_unpack_file(file, data, url = None):
destdir = "."
bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
if not cmd:
return True
dest = os.path.join(os.getcwd(), os.path.basename(file))
if os.path.exists(dest):
if os.path.samefile(file, dest):
@@ -478,6 +478,7 @@ python base_eventhandler() {
msg += messages.get(name[5:]) or name[5:]
elif name == "UnsatisfiedDep":
msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
if msg:
note(msg)
if name.startswith("BuildStarted"):
@@ -486,7 +487,7 @@ python base_eventhandler() {
path_to_packages = path_to_bbfiles[:path_to_bbfiles.rindex( "packages" )]
monotone_revision = "<unknown>"
try:
monotone_revision = file( "%s/MT/revision" % path_to_packages ).read().strip()
monotone_revision = file( "%s/_MTN/revision" % path_to_packages ).read().strip()
except IOError:
pass
bb.data.setVar( 'OE_REVISION', monotone_revision, e.data )
@@ -519,6 +520,7 @@ python base_eventhandler() {
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[bbdepcmd] = "do_populate_staging"
do_configure[deptask] = "do_populate_staging"
base_do_configure() {
:
}
@@ -645,7 +647,7 @@ python __anonymous () {
if need_machine:
import re
this_machine = bb.data.getVar('MACHINE', d, 1)
if not re.match(need_machine, this_machine):
if this_machine and not re.match(need_machine, this_machine):
raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
pn = bb.data.getVar('PN', d, 1)
@@ -719,12 +721,18 @@ ftp://ftp.kernel.org/pub ftp://ftp.jp.kernel.org/pub
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
ftp://.*/.*/ http://www.oesources.org/source/current/
http://.*/.*/ http://www.oesources.org/source/current/
}
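The new MIRRORS entries above each pair a regular expression, matched against the original URI, with an alternative base to try when the upstream download fails. A rough, purely illustrative sketch of how such a table is applied (this is not the BitBake fetcher code, and the two entries are hypothetical simplifications of the lines added above):

import re

mirrors = [
    (r"ftp://ftp\.gnutls\.org/pub/gnutls/", "http://josefsson.org/gnutls/releases/"),
    (r"ftp://.*/.*/",                       "http://www.oesources.org/source/current/"),
]

def mirror_candidates(uri):
    """Yield fallback URIs to try, in table order, when the primary fetch fails."""
    for pattern, base in mirrors:
        if re.match(pattern, uri):
            yield re.sub(pattern, base, uri, count=1)

# list(mirror_candidates("ftp://ftp.gnutls.org/pub/gnutls/gnutls-1.4.4.tar.bz2"))
# -> ["http://josefsson.org/gnutls/releases/gnutls-1.4.4.tar.bz2",
#     "http://www.oesources.org/source/current/gnutls-1.4.4.tar.bz2"]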

View File

@@ -1,7 +1,8 @@
FILES_${PN} += '${libdir}/perl5'
EXTRA_CPANFLAGS = ""
cpan_do_configure () {
perl Makefile.PL
perl Makefile.PL ${EXTRA_CPANFLAGS}
if [ "${BUILD_SYS}" != "${HOST_SYS}" ]; then
. ${STAGING_DIR}/${TARGET_SYS}/perl/config.sh
sed -e "s:\(SITELIBEXP = \).*:\1${sitelibexp}:; s:\(SITEARCHEXP = \).*:\1${sitearchexp}:; s:\(INSTALLVENDORLIB = \).*:\1${D}${libdir}/perl5:; s:\(INSTALLVENDORARCH = \).*:\1${D}${libdir}/perl5:" < Makefile > Makefile.new

View File

@@ -44,6 +44,6 @@ do_stage_append () {
}
PACKAGES = "${PN} ${PN}-themes ${PN}-dev ${PN}-examples"
FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a"
FILES_${PN}-dev = "${bindir}/${PN}-config ${libdir}/pkgconfig ${libdir}/lib*.?a ${libdir}/lib*.a ${includedir}"
FILES_${PN}-examples = "${bindir} ${datadir}"

View File

@@ -4,6 +4,14 @@
# ICECC_VERSION, ICECC_CXX and ICECC_CC
#
def icc_determine_gcc_version(gcc):
"""
Hack to determine the version of GCC
'i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)'
"""
return os.popen("%s --version" % gcc ).readline().split()[2]
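Given the sample --version output quoted in the docstring, splitting on whitespace and taking the third field is what recovers the version string; a tiny standalone illustration (assuming the compiler output follows that pattern):

line = "i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)"
print(line.split()[2])    # -> "4.0.1", later used to name the cross toolchain tarball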
def create_env(bb,d):
"""
Create a tar.bz of the current toolchain
@@ -23,51 +31,66 @@ def create_env(bb,d):
prefix = bb.data.expand('${HOST_PREFIX}' , d)
distro = bb.data.expand('${DISTRO}', d)
target_sys = bb.data.expand('${TARGET_SYS}', d)
#float = bb.data.getVar('${TARGET_FPU}', d)
float = "anyfloat"
float = bb.data.getVar('${TARGET_FPU}', d) or "hard"
name = socket.gethostname()
# Stupid check to determine if we have built a libc and a cross
# compiler.
try:
os.stat(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2')
os.stat(ice_dir + '/' + target_sys + '/bin/g++')
os.stat(os.path.join(ice_dir, target_sys, 'lib', 'ld-linux.so.2'))
os.stat(os.path.join(ice_dir, target_sys, 'bin', 'g++'))
except:
return ""
VERSION = '3.4.3'
VERSION = icc_determine_gcc_version( os.path.join(ice_dir,target_sys,"bin","g++") )
cross_name = prefix + distro + target_sys + float +VERSION+ name
tar_file = ice_dir + '/ice/' + cross_name + '.tar.bz2'
tar_file = os.path.join(ice_dir, 'ice', cross_name + '.tar.bz2')
try:
os.stat(tar_file)
return tar_file
except:
try:
os.makedirs(ice_dir+'/ice')
os.makedirs(os.path.join(ice_dir,'ice'))
except:
pass
# FIXME find out the version of the compiler
# Consider using -print-prog-name={cc1,cc1plus}
# and -print-file-name=specs
# We will use the GCC to tell us which tools to use
# What we need is:
# -gcc
# -g++
# -as
# -cc1
# -cc1plus
# and we add them to /usr/bin
tar = tarfile.open(tar_file, 'w:bz2')
tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
target_sys + 'cross/lib/ld-linux.so.2')
tar.add(ice_dir + '/' + target_sys + '/lib/ld-linux.so.2',
target_sys + 'cross/lib/ld-2.3.3.so')
tar.add(ice_dir + '/' + target_sys + '/lib/libc-2.3.3.so',
target_sys + 'cross/lib/libc-2.3.3.so')
tar.add(ice_dir + '/' + target_sys + '/lib/libc.so.6',
target_sys + 'cross/lib/libc.so.6')
tar.add(ice_dir + '/' + target_sys + '/bin/gcc',
target_sys + 'cross/usr/bin/gcc')
tar.add(ice_dir + '/' + target_sys + '/bin/g++',
target_sys + 'cross/usr/bin/g++')
tar.add(ice_dir + '/' + target_sys + '/bin/as',
target_sys + 'cross/usr/bin/as')
tar.add(ice_dir + '/lib/gcc/' + target_sys +'/'+ VERSION + '/specs',
target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/specs')
tar.add(ice_dir + '/libexec/gcc/'+target_sys+'/' + VERSION + '/cc1',
target_sys + 'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1')
tar.add(ice_dir + '/libexec/gcc/arm-linux/' + VERSION + '/cc1plus',
target_sys+'cross/usr/lib/gcc/'+target_sys+'/'+VERSION+'/lib/cc1plus')
# Now add the required files
tar.add(os.path.join(ice_dir,target_sys,'bin','gcc'),
os.path.join("usr","bin","gcc") )
tar.add(os.path.join(ice_dir,target_sys,'bin','g++'),
os.path.join("usr","bin","g++") )
tar.add(os.path.join(ice_dir,target_sys,'bin','as'),
os.path.join("usr","bin","as") )
# Now let us find cc1 and cc1plus
cc1 = os.popen("%s -print-prog-name=cc1" % data.getVar('CC', d, True)).read()[:-1]
cc1plus = os.popen("%s -print-prog-name=cc1plus" % data.getVar('CC', d, True)).read()[:-1]
spec = os.popen("%s -print-file-name=specs" % data.getVar('CC', d, True)).read()[:-1]
# CC1 and CC1PLUS should be there...
tar.add(cc1, os.path.join('usr', 'bin', 'cc1'))
tar.add(cc1plus, os.path.join('usr', 'bin', 'cc1plus'))
# spec - if it exists
if os.path.exists(spec):
tar.add(spec)
tar.close()
return tar_file
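As the comments note, the rewritten create_env() now asks the compiler itself where its internal tools live: -print-prog-name resolves cc1 and cc1plus, and -print-file-name resolves the specs file. A minimal standalone sketch of that lookup (arm-linux-gcc is a hypothetical cross compiler on PATH; this mirrors, but is not, the code above):

import os

cc = "arm-linux-gcc"                                        # hypothetical ${CC}
cc1     = os.popen("%s -print-prog-name=cc1" % cc).read().strip()
cc1plus = os.popen("%s -print-prog-name=cc1plus" % cc).read().strip()
specs   = os.popen("%s -print-file-name=specs" % cc).read().strip()

# cc1/cc1plus come back as absolute paths into gcc's libexec directory;
# -print-file-name returns just "specs" when no specs file is installed,
# which is why the code above guards tar.add(spec) with os.path.exists().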
@@ -78,7 +101,7 @@ def create_path(compilers, type, bb, d):
"""
import os
staging = bb.data.expand('${STAGING_DIR}', d) + "/ice/" + type
staging = os.path.join(bb.data.expand('${STAGING_DIR}', d), "ice", type)
icecc = bb.data.getVar('ICECC_PATH', d)
# Create the dir if necessary
@@ -89,7 +112,7 @@ def create_path(compilers, type, bb, d):
for compiler in compilers:
gcc_path = staging + "/" + compiler
gcc_path = os.path.join(staging, compiler)
try:
os.stat(gcc_path)
except:
@@ -105,11 +128,10 @@ def use_icc_version(bb,d):
return "no"
native = bb.data.expand('${PN}', d)
blacklist = [ "-cross", "-native" ]
blacklist = [ "cross", "native" ]
for black in blacklist:
if black in native:
if bb.data.inherits_class(black, d):
return "no"
return "yes"
@@ -121,9 +143,9 @@ def icc_path(bb,d,compile):
if black in native:
return ""
if "-native" in native:
compile = False
if "-cross" in native:
blacklist = [ "cross", "native" ]
for black in blacklist:
if bb.data.inherits_class(black, d):
compile = False
prefix = bb.data.expand('${HOST_PREFIX}', d)

View File

@@ -19,6 +19,7 @@ def map_kernel_arch(a, d):
elif re.match('armeb$', a): return 'arm'
elif re.match('powerpc$', a): return 'ppc'
elif re.match('mipsel$', a): return 'mips'
elif re.match('sh(3|4)$', a): return 'sh'
elif a in valid_archs: return a
else:
bb.error("cannot map '%s' to a linux kernel architecture" % a)

View File

@@ -109,6 +109,21 @@ kernel_do_stage() {
mkdir -p ${STAGING_KERNEL_DIR}/include/pcmcia
cp -fR include/pcmcia/* ${STAGING_KERNEL_DIR}/include/pcmcia/
if [ -d drivers/crypto ]; then
mkdir -p ${STAGING_KERNEL_DIR}/drivers/crypto
cp -fR drivers/crypto/* ${STAGING_KERNEL_DIR}/drivers/crypto/
fi
if [ -d include/media ]; then
mkdir -p ${STAGING_KERNEL_DIR}/include/media
cp -fR include/media/* ${STAGING_KERNEL_DIR}/include/media/
fi
if [ -d include/acpi ]; then
mkdir -p ${STAGING_KERNEL_DIR}/include/acpi
cp -fR include/acpi/* ${STAGING_KERNEL_DIR}/include/acpi/
fi
if [ -d include/sound ]; then
mkdir -p ${STAGING_KERNEL_DIR}/include/sound
cp -fR include/sound/* ${STAGING_KERNEL_DIR}/include/sound/
@@ -133,7 +148,7 @@ kernel_do_stage() {
# Check if arch/${ARCH}/Makefile exists and install it
if [ -e arch/${ARCH}/Makefile ]; then
install -d ${STAGING_KERNEL_DIR}/arch/${ARCH}
install -m 0644 arch/${ARCH}/Makefile ${STAGING_KERNEL_DIR}/arch/${ARCH}
install -m 0644 arch/${ARCH}/Makefile* ${STAGING_KERNEL_DIR}/arch/${ARCH}
fi
cp -fR include/config* ${STAGING_KERNEL_DIR}/include/
install -m 0644 ${KERNEL_OUTPUT} ${STAGING_KERNEL_DIR}/${KERNEL_IMAGETYPE}
@@ -199,7 +214,7 @@ fi
if [ -n "$D" ]; then
${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
else
depmod -A
depmod -a
fi
}
@@ -207,7 +222,7 @@ pkg_postinst_modules () {
if [ -n "$D" ]; then
${HOST_PREFIX}depmod-${KERNEL_MAJOR_VERSION} -A -b $D -F ${STAGING_KERNEL_DIR}/System.map-${KERNEL_RELEASE} ${KERNEL_VERSION}
else
depmod -A
depmod -a
update-modules || true
fi
}

View File

@@ -38,7 +38,7 @@ pkg_postinst_append () {
if [ -n "$D" ]; then
exit 1
fi
depmod -A
depmod -a
update-modules || true
}

View File

@@ -18,7 +18,7 @@ inherit palmtop
# Note that when CVS changes to 1.2.2, the dash
# should be removed from OPIE_CVS_PV to convert
# to the standardised version format
OPIE_CVS_PV = "1.2.1+cvs-${SRCDATE}"
OPIE_CVS_PV = "1.2.2+cvs-${SRCDATE}"
DEPENDS_prepend = "${@["libopie2 ", ""][(bb.data.getVar('PN', d, 1) == 'libopie2')]}"

View File

@@ -20,7 +20,7 @@ def get_pkgconfig_mangle(d):
return s
do_stage_append () {
for pc in `find ${S} -name '*.pc' | grep -v -- '-uninstalled.pc$'`; do
for pc in `find ${S} -name '*.pc' -type f | grep -v -- '-uninstalled.pc$'`; do
pcname=`basename $pc`
install -d ${PKG_CONFIG_PATH}
cat $pc | sed ${@get_pkgconfig_mangle(d)} > ${PKG_CONFIG_PATH}/$pcname

View File

@@ -64,13 +64,14 @@ def check_sanity(e):
if "diffstat-native" not in data.getVar('ASSUME_PROVIDED', e.data, True).split():
raise_sanity_error('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf')
# Check the MACHINE is valid
# Check that the MACHINE is valid
if not check_conf_exists("conf/machine/${MACHINE}.conf", e.data):
raise_sanity_error('Please set a valid MACHINE in your local.conf')
# Check the distro is valid
if not check_conf_exists("conf/distro/${DISTRO}.conf", e.data):
raise_sanity_error('Please set a valid DISTRO in your local.conf')
# Check that the DISTRO is valid
# need to take into account a renamed DISTRO whose config may only exist as conf/distro/include/${DISTRO}.inc
if not ( check_conf_exists("conf/distro/${DISTRO}.conf", e.data) or check_conf_exists("conf/distro/include/${DISTRO}.inc", e.data) ):
raise_sanity_error("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf" % data.getVar("DISTRO", e.data, True ))
if not check_app_exists("${MAKE}", e.data):
raise_sanity_error('GNU make missing. Please install GNU make')
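check_conf_exists() used above is defined elsewhere in sanity.bbclass and is not part of this hunk; roughly, it expands the relative conf path and probes each BBPATH entry for a readable file. An approximate sketch (not the exact helper):

import os
from bb import data

def check_conf_exists(fn, d):
    # e.g. fn = "conf/distro/${DISTRO}.conf"
    fn = data.expand(fn, d)
    for p in (data.getVar("BBPATH", d, True) or "").split(":"):
        if os.access(os.path.join(data.expand(p, d), fn), os.R_OK):
            return True
    return False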

View File

@@ -26,7 +26,7 @@ sdl_do_sdl_install() {
Note=Auto Generated... this may be not what you want
Comment=${DESCRIPTION}
Exec=${APPNAME}
Icon=${APPIMAGE}
Icon=${PN}.png
Type=Application
Name=${PN}
EOF

View File

@@ -255,6 +255,14 @@ def tinder_do_tinder_report(event):
information immediately. The caching/queuing needs to be
implemented. Also sending more or less information is not
implemented yet.
We keep two temporary files in the TMP directory. The first holds the machine id
assigned to this tinderclient; the id is assigned when we connect the box and
start the build process. The second file works around an EventHandler limitation:
if BitBake is run with the continue (-k) option we want the build to be reported
as failed even when the BuildCompleted event arrives, so we look up the recorded
status and send it instead of 100/success.
"""
from bb.event import getName
from bb import data, mkdirhier, build
@@ -264,7 +272,6 @@ def tinder_do_tinder_report(event):
name = getName(event)
log = ""
status = 1
#print asd
# Check what we need to do Build* shows we start or are done
if name == "BuildStarted":
tinder_build_start(event.data)
@@ -272,9 +279,18 @@ def tinder_do_tinder_report(event):
try:
# truncate the tinder log file
f = file(data.getVar('TINDER_LOG', event.data, True), 'rw+')
f.truncate(0)
f = file(data.getVar('TINDER_LOG', event.data, True), 'w')
f.write("")
f.close()
except:
pass
try:
# write a status to the file. This is needed for the -k option
# of BitBake
g = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
g.write("")
g.close()
except IOError:
pass
@@ -295,16 +311,27 @@ def tinder_do_tinder_report(event):
elif name == "TaskFailed":
log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task
elif name == "PkgStarted":
log += "---> TINDERBOX Package %s started\n" % data.getVar('P', event.data, True)
log += "---> TINDERBOX Package %s started\n" % data.getVar('PF', event.data, True)
elif name == "PkgSucceeded":
log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('P', event.data, True)
log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % data.getVar('PF', event.data, True)
elif name == "PkgFailed":
if not data.getVar('TINDER_AUTOBUILD', event.data, True) == "0":
build.exec_task('do_clean', event.data)
log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('P', event.data, True)
log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % data.getVar('PF', event.data, True)
status = 200
# remember the failure for the -k case
h = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
h.write("200")
elif name == "BuildCompleted":
log += "Build Completed\n"
status = 100
# Check if we have a old status...
try:
h = file(data.getVar('TMPDIR',event.data,True)+'/tinder-status', 'r')
status = int(h.read())
except:
pass
elif name == "MultipleProviders":
log += "---> TINDERBOX Multiple Providers\n"
log += "multiple providers are available (%s);\n" % ", ".join(event.getCandidates())
@@ -315,6 +342,9 @@ def tinder_do_tinder_report(event):
log += "Error: No Provider for: %s\n" % event.getItem()
log += "Error:Was Runtime: %d\n" % event.isRuntime()
status = 200
# remember the failure for the -k case
h = file(data.getVar('TMPDIR', event.data, True)+"/tinder-status", 'w')
h.write("200")
# now post the log
if len(log) == 0: