2008-03-10 15:43:39 +00:00
BB_DEFAULT_TASK ?= "build"
2005-08-31 10:45:47 +00:00
2007-08-08 21:04:28 +00:00
# like os.path.join but doesn't treat absolute RHS specially
def base_path_join(a, *p):
    """Join path components with '/', never restarting at an absolute
    right-hand component (unlike os.path.join)."""
    joined = a
    for component in p:
        if joined == '' or joined.endswith('/'):
            joined += component
        else:
            joined += '/' + component
    return joined
# for MD5/SHA handling
def base_chk_load_parser(config_path):
    """Load a checksums ini file (conf/checksums.ini) into a ConfigParser.

    Raises Exception when the file cannot be read.
    """
    import ConfigParser
    parser = ConfigParser.ConfigParser()
    # read() silently skips unreadable files and returns the list of files
    # successfully parsed, so verify exactly one was loaded
    if not len(parser.read(config_path)) == 1:
        bb.note("Can not open the '%s' ini file" % config_path)
        raise Exception("Can not open the '%s'" % config_path)

    return parser
def base_chk_file(parser, pn, pv, src_uri, localpath, data):
    """Verify the md5/sha256 checksums of a fetched file against the
    checksums ini data.

    Returns True when both checksums match, False when the ini has no
    entry for this URI (a template entry is appended to
    TMPDIR/checksums.ini in that case).  Raises Exception when the local
    file is missing or a checksum mismatches.
    """
    no_checksum = False
    # Try PN-PV-SRC_URI first and then try PN-SRC_URI
    # we rely on the get method to create errors
    pn_pv_src = "%s-%s-%s" % (pn,pv,src_uri)
    pn_src = "%s-%s" % (pn,src_uri)
    if parser.has_section(pn_pv_src):
        md5 = parser.get(pn_pv_src, "md5")
        sha256 = parser.get(pn_pv_src, "sha256")
    elif parser.has_section(pn_src):
        md5 = parser.get(pn_src, "md5")
        sha256 = parser.get(pn_src, "sha256")
    elif parser.has_section(src_uri):
        md5 = parser.get(src_uri, "md5")
        sha256 = parser.get(src_uri, "sha256")
    else:
        # no section found: md5/sha256 stay unbound, but every path below
        # that uses them is guarded by no_checksum returning first
        no_checksum = True

    # md5 and sha256 should be valid now
    if not os.path.exists(localpath):
        bb.note("The localpath does not exist '%s'" % localpath)
        raise Exception("The path does not exist '%s'" % localpath)

    # Calculate the MD5 and 256-bit SHA checksums
    md5data = bb.utils.md5_file(localpath)
    shadata = bb.utils.sha256_file(localpath)

    # sha256_file() can return None if we are running on Python 2.4 (hashlib is
    # 2.5 onwards, sha in 2.4 is 160-bit only), so check for this and call the
    # standalone shasum binary if required.
    if shadata is None:
        try:
            shapipe = os.popen('PATH=%s oe_sha256sum %s' % (bb.data.getVar('PATH', data, True), localpath))
            shadata = (shapipe.readline().split() or [ "" ])[0]
            shapipe.close()
        except OSError:
            raise Exception("Executing shasum failed, please build shasum-native")

    if no_checksum == True:  # we do not have conf/checksums.ini entry
        # best-effort: record a template entry so the maintainer can fill it in
        try:
            file = open("%s/checksums.ini" % bb.data.getVar("TMPDIR", data, 1), "a")
        except:
            return False

        if not file:
            raise Exception("Creating checksums.ini failed")

        file.write("[%s]\nmd5=%s\nsha256=%s\n\n" % (src_uri, md5data, shadata))
        file.close()
        return False

    if not md5 == md5data:
        bb.note("The MD5Sums did not match. Wanted: '%s' and Got: '%s'" % (md5,md5data))
        raise Exception("MD5 Sums do not match. Wanted: '%s' Got: '%s'" % (md5, md5data))

    if not sha256 == shadata:
        bb.note("The SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256,shadata))
        raise Exception("SHA256 Sums do not match. Wanted: '%s' Got: '%s'" % (sha256, shadata))

    return True
2005-08-31 10:45:47 +00:00
def base_dep_prepend(d):
    """Compute the implicit dependencies prepended to DEPENDS.

    Ideally this would check a flag to operate properly when
    host == build == target; for now that case is not handled.
    """
    # bb.utils.sha256_file() returns None on Python 2.4 (hashlib appeared
    # in 2.5; sha in 2.4 is 160-bit only).  Checksumming then falls back
    # to the standalone shasum binary, so make sure it gets built.
    try:
        import hashlib
        deps = ""
    except ImportError:
        # Adding shasum-native as a dependency of shasum-native would be
        # stupid, so don't do that.
        if bb.data.getVar('PN', d, True) != "shasum-native":
            deps = "shasum-native "
        else:
            deps = ""

    # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
    # we need that built is the responsibility of the patch function / class,
    # not the application.
    if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
        host_sys = bb.data.getVar('HOST_SYS', d, 1)
        build_sys = bb.data.getVar('BUILD_SYS', d, 1)
        if host_sys != build_sys:
            deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
    return deps
def base_read_file(filename):
    """Return the stripped contents of *filename*, or "" if it cannot
    be opened.

    A missing/unreadable file deliberately returns "" instead of raising:
    callers (the RDEPENDS handling) rely on this being non-fatal. :M:

    Fixes over the previous version: the file handle is now closed
    (it used to leak), the unreachable trailing ``return None`` is gone,
    and the builtin ``open`` is used instead of ``file``.
    """
    try:
        f = open(filename, "r")
    except IOError:
        return ""  # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
    try:
        return f.read().strip()
    finally:
        f.close()
def base_conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return truevalue when the variable's value equals checkvalue,
    otherwise falsevalue."""
    if bb.data.getVar(variable, d, 1) != checkvalue:
        return falsevalue
    return truevalue
2007-08-08 21:04:28 +00:00
def base_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Numeric comparison: truevalue when float(variable) <= float(checkvalue)."""
    current = float(bb.data.getVar(variable, d, 1))
    if current > float(checkvalue):
        return falsevalue
    return truevalue
2008-03-03 22:58:30 +00:00
def base_version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Version-string comparison via bb.vercmp: truevalue when the
    variable's version is <= checkvalue."""
    current = bb.data.getVar(variable, d, True)
    if bb.vercmp(current, checkvalue) <= 0:
        return truevalue
    return falsevalue
2006-12-20 00:00:03 +00:00
def base_contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue when every entry of checkvalues occurs as a
    substring of the variable's value, otherwise falsevalue.

    checkvalues may be a single string or a list of strings.
    Improvements: idiomatic isinstance() instead of comparing
    type(...).__name__, and the datastore is read once instead of once
    per checked value.
    """
    val = bb.data.getVar(variable, d, 1)
    if isinstance(checkvalues, str):
        checkvalues = [checkvalues]
    for value in checkvalues:
        if val.find(value) == -1:
            return falsevalue
    return truevalue
2006-07-24 09:22:17 +00:00
2006-07-26 22:17:08 +00:00
def base_both_contain(variable1, variable2, checkvalue, d):
    """Return checkvalue when it occurs in the values of both variables,
    otherwise the empty string."""
    if bb.data.getVar(variable1, d, 1).find(checkvalue) == -1:
        return ""
    if bb.data.getVar(variable2, d, 1).find(checkvalue) == -1:
        return ""
    return checkvalue
2005-08-31 10:45:47 +00:00
DEPENDS_prepend="${@base_dep_prepend(d)} "
2009-11-09 14:45:57 +00:00
DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
2005-08-31 10:45:47 +00:00
2009-01-02 10:15:45 +00:00
def base_prune_suffix(var, suffixes, d):
    """Strip the first matching suffix from the end of var.

    Bug fix: the previous implementation used var.replace(suffix, ""),
    which removed *every* occurrence of the suffix, not just the
    trailing one (e.g. "gtk-native-native" became "gtk" instead of
    "gtk-native").  Only the trailing suffix is removed now.
    """
    for suffix in suffixes:
        # guard against an empty suffix: var[:-0] would wrongly yield ""
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
2005-08-31 10:45:47 +00:00
def base_set_filespath(path, d):
    """Build the FILESPATH search string: every directory in *path*
    crossed with every OVERRIDES entry, joined with ':'."""
    # The ":" ensures we have an 'empty' override
    overrides = (bb.data.getVar("OVERRIDES", d, 1) or "") + ":"
    filespath = [os.path.join(p, o)
                 for p in path
                 for o in overrides.split(":")]
    return ":".join(filespath)
2005-08-31 10:45:47 +00:00
2009-01-02 10:15:45 +00:00
FILESPATH = "${@base_set_filespath([ "${FILE_DIRNAME}/${PF}", "${FILE_DIRNAME}/${P}", "${FILE_DIRNAME}/${PN}", "${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files", "${FILE_DIRNAME}" ], d)}"
2005-08-31 10:45:47 +00:00
def oe_filter(f, str, d):
    """Return the whitespace-separated words of *str* whose start
    matches the regex *f*, rejoined with single spaces."""
    from re import match
    kept = [word for word in str.split() if match(f, word, 0)]
    return " ".join(kept)
def oe_filter_out(f, str, d):
    """Return the whitespace-separated words of *str* whose start does
    NOT match the regex *f*, rejoined with single spaces."""
    from re import match
    kept = [word for word in str.split() if not match(f, word, 0)]
    return " ".join(kept)
# die <message...>: report a fatal error and abort the task
die() {
	oefatal "$*"
}

# oenote <message...>: informational message on stdout
oenote() {
	echo "NOTE:" "$*"
}

# oewarn <message...>: warning message on stdout
oewarn() {
	echo "WARNING:" "$*"
}

# oefatal <message...>: fatal message, exits the shell with status 1
oefatal() {
	echo "FATAL:" "$*"
	exit 1
}

# oedebug <level> <message...>: print the message when OEDEBUG >= level
oedebug() {
	test $# -ge 2 || {
		echo "Usage: oedebug level \"message\""
		exit 1
	}

	test ${OEDEBUG:-0} -ge $1 && {
		shift
		echo "DEBUG:" $*
	}
}
# oe_runmake [args...]: run ${MAKE} (default "make") with EXTRA_OEMAKE and
# the given arguments, dying on failure
oe_runmake() {
	if [ x"$MAKE" = x ]; then MAKE=make; fi
	oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
	${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}
oe_soinstall() {
	# Purpose: Install shared library file and
	#          create the necessary links
	# Example:
	#
	# oe_
	#
	#oenote installing shared library $1 to $2
	#
	libname=`basename $1`
	install -m 755 $1 $2/$libname
	# Extract the DT_SONAME from the ELF dynamic section to create the
	# soname symlink, plus a plain .so link for the linker
	sonamelink=`${HOST_PREFIX}readelf -d $1 |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
	solink=`echo $libname | sed -e 's/\.so\..*/.so/'`
	ln -sf $libname $2/$sonamelink
	ln -sf $libname $2/$solink
}
oe_libinstall() {
	# Purpose: Install a library, in all its forms
	# Example
	#
	# oe_libinstall libltdl ${STAGING_LIBDIR}/
	# oe_libinstall -C src/libblah libblah ${D}/${libdir}/
	#
	# Options:
	#   -C dir   chdir into dir (relative to the current directory) first
	#   -s       silent: don't echo the commands being run
	#   -a       require the static archive (.a) to exist
	#   -so      require a shared library to be found
	dir=""
	libtool=""
	silent=""
	require_static=""
	require_shared=""
	staging_install=""
	while [ "$#" -gt 0 ]; do
		case "$1" in
		-C)
			shift
			dir="$1"
			;;
		-s)
			silent=1
			;;
		-a)
			require_static=1
			;;
		-so)
			require_shared=1
			;;
		-*)
			oefatal "oe_libinstall: unknown option: $1"
			;;
		*)
			break;
			;;
		esac
		shift
	done

	libname="$1"
	shift
	destpath="$1"
	if [ -z "$destpath" ]; then
		oefatal "oe_libinstall: no destination path specified"
	fi
	# Installing into staging needs extra .la rewriting (see below)
	if echo "$destpath/" | egrep '^${STAGING_LIBDIR}/' >/dev/null
	then
		staging_install=1
	fi

	__runcmd () {
		if [ -z "$silent" ]; then
			echo >&2 "oe_libinstall: $*"
		fi
		$*
	}

	if [ -z "$dir" ]; then
		dir=`pwd`
	fi

	dotlai=$libname.lai

	# Sanity check that the libname.lai is unique
	number_of_files=`(cd $dir; find . -name "$dotlai") | wc -l`
	if [ $number_of_files -gt 1 ]; then
		oefatal "oe_libinstall: $dotlai is not unique in $dir"
	fi

	# Descend into the (unique) directory containing the .lai, if any
	dir=$dir`(cd $dir;find . -name "$dotlai") | sed "s/^\.//;s/\/$dotlai\$//;q"`

	olddir=`pwd`
	__runcmd cd $dir

	lafile=$libname.la

	# If such file doesn't exist, try to cut version suffix
	if [ ! -f "$lafile" ]; then
		libname1=`echo "$libname" | sed 's/-[0-9.]*$//'`
		# BUG FIX: this used to read "lafile1=$libname.la", which is
		# identical to $lafile and made the whole fallback a no-op.
		lafile1=$libname1.la
		if [ -f "$lafile1" ]; then
			libname=$libname1
			lafile=$lafile1
		fi
	fi

	if [ -f "$lafile" ]; then
		# libtool archive
		eval `cat $lafile|grep "^library_names="`
		libtool=1
	else
		library_names="$libname.so* $libname.dll.a"
	fi

	__runcmd install -d $destpath/
	dota=$libname.a
	if [ -f "$dota" -o -n "$require_static" ]; then
		rm -f $destpath/$dota
		__runcmd install -m 0644 $dota $destpath/
	fi
	if [ -f "$dotlai" -a -n "$libtool" ]; then
		if test -n "$staging_install"
		then
			# stop libtool using the final directory name for libraries
			# in staging:
			__runcmd rm -f $destpath/$libname.la
			__runcmd sed -e 's/^installed=yes$/installed=no/' \
				-e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
				-e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
				$dotlai >$destpath/$libname.la
		else
			rm -f $destpath/$libname.la
			__runcmd install -m 0644 $dotlai $destpath/$libname.la
		fi
	fi

	for name in $library_names; do
		files=`eval echo $name`
		for f in $files; do
			if [ ! -e "$f" ]; then
				if [ -n "$libtool" ]; then
					oefatal "oe_libinstall: $dir/$f not found."
				fi
			elif [ -L "$f" ]; then
				# preserve symlinks as symlinks
				__runcmd cp -P "$f" $destpath/
			elif [ ! -L "$f" ]; then
				libfile="$f"
				rm -f $destpath/$libfile
				__runcmd install -m 0755 $libfile $destpath/
			fi
		done
	done

	if [ -z "$libfile" ]; then
		if  [ -n "$require_shared" ]; then
			oefatal "oe_libinstall: unable to locate shared library"
		fi
	elif [ -z "$libtool" ]; then
		# special case hack for non-libtool .so.#.#.# links
		baselibfile=`basename "$libfile"`
		if (echo $baselibfile | grep -qE '^lib.*\.so\.[0-9.]*$'); then
			sonamelink=`${HOST_PREFIX}readelf -d $libfile |grep 'Library soname:' |sed -e 's/.*\[\(.*\)\].*/\1/'`
			solink=`echo $baselibfile | sed -e 's/\.so\..*/.so/'`
			if [ -n "$sonamelink" -a x"$baselibfile" != x"$sonamelink" ]; then
				__runcmd ln -sf $baselibfile $destpath/$sonamelink
			fi
			__runcmd ln -sf $baselibfile $destpath/$solink
		fi
	fi

	__runcmd cd "$olddir"
}
2008-03-19 16:07:25 +00:00
def package_stagefile(file, d):
    """Copy *file* into the packaged-staging area when PSTAGING_ACTIVE is "1"."""
    if bb.data.getVar('PSTAGING_ACTIVE', d, True) == "1":
        # mirror the TMPDIR-relative path under PSTAGE_TMPDIR_STAGE
        destfile = file.replace(bb.data.getVar("TMPDIR", d, 1), bb.data.getVar("PSTAGE_TMPDIR_STAGE", d, 1))
        bb.mkdirhier(os.path.dirname(destfile))
        #print "%s to %s" % (file, destfile)
        bb.copyfile(file, destfile)
# Shell counterpart of package_stagefile(): copy $1 into the
# packaged-staging area when PSTAGING_ACTIVE is "1"
package_stagefile_shell() {
	if [ "$PSTAGING_ACTIVE" = "1" ]; then
		srcfile=$1
		# mirror the TMPDIR-relative path under PSTAGE_TMPDIR_STAGE
		destfile=`echo $srcfile | sed s#${TMPDIR}#${PSTAGE_TMPDIR_STAGE}#`
		destdir=`dirname $destfile`
		mkdir -p $destdir
		cp -dp $srcfile $destfile
	fi
}
2005-08-31 10:45:47 +00:00
oe_machinstall() {
	# Purpose: Install machine dependent files, if available
	#          If not available, check if there is a default
	#          If no default, just touch the destination
	# Example:
	#                $1  $2   $3         $4
	# oe_machinstall -m 0644 fstab ${D}/etc/fstab
	#
	# TODO: Check argument number?
	#
	filename=`basename $3`
	dirname=`dirname $3`

	# try each OVERRIDES-specific subdirectory first, most specific wins
	for o in `echo ${OVERRIDES} | tr ':' ' '`; do
		if [ -e $dirname/$o/$filename ]; then
			oenote $dirname/$o/$filename present, installing to $4
			install $1 $2 $dirname/$o/$filename $4
			return
		fi
	done
#	oenote overrides specific file NOT present, trying default=$3...
	if [ -e $3 ]; then
		oenote $3 present, installing to $4
		install $1 $2 $3 $4
	else
		oenote $3 NOT present, touching empty $4
		touch $4
	fi
}
addtask listtasks
do_listtasks[nostamp] = "1"
python do_listtasks() {
	# Print the name of every variable flagged as a task to stdout.
	import sys
	# emit variables and shell functions
	#bb.data.emit_env(sys.__stdout__, d)
	# emit the metadata which isnt valid shell
	for e in d.keys():
		if bb.data.getVarFlag(e, 'task', d):
			sys.__stdout__.write("%s\n" % e)
}
addtask clean
do_clean[dirs] = "${TOPDIR}"
do_clean[nostamp] = "1"
python base_do_clean() {
	"""clear the build and temp directories"""
	dir = bb.data.expand("${WORKDIR}", d)
	# sanity guard against an unset/empty WORKDIR expansion
	if dir == '//': raise bb.build.FuncFailed("wrong DATADIR")
	bb.note("removing " + dir)
	os.system('rm -rf ' + dir)

	# also remove all stamps for this recipe
	dir = "%s.*" % bb.data.expand(bb.data.getVar('STAMP', d), d)
	bb.note("removing " + dir)
	os.system('rm -f '+ dir)
}
2008-03-03 22:58:30 +00:00
addtask rebuild after do_${BB_DEFAULT_TASK}
2006-08-31 09:20:47 +00:00
do_rebuild[dirs] = "${TOPDIR}"
do_rebuild[nostamp] = "1"
python base_do_rebuild() {
"""rebuild a package"""
}
2008-05-29 09:28:36 +00:00
#addtask mrproper
#do_mrproper[dirs] = "${TOPDIR}"
#do_mrproper[nostamp] = "1"
#python base_do_mrproper() {
# """clear downloaded sources, build and temp directories"""
# dir = bb.data.expand("${DL_DIR}", d)
# if dir == '/': bb.build.FuncFailed("wrong DATADIR")
# bb.debug(2, "removing " + dir)
# os.system('rm -rf ' + dir)
# bb.build.exec_func('do_clean', d)
#}
2005-08-31 10:45:47 +00:00
2008-04-27 10:48:16 +00:00
SCENEFUNCS += "base_scenefunction"
python base_do_setscene () {
	# Run every function listed in SCENEFUNCS, then create the
	# do_setscene stamp if it doesn't exist yet.
	for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
		bb.build.exec_func(f, d)
	if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
		bb.build.make_stamp("do_setscene", d)
}
do_setscene[selfstamp] = "1"
addtask setscene before do_fetch
python base_scenefunction () {
	# If a ".needclean" marker stamp exists (written by the rebuild
	# stamp handler), run do_clean before anything else.
	stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
	if os.path.exists(stamp):
		bb.build.exec_func("do_clean", d)
}
2005-08-31 10:45:47 +00:00
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
python base_do_fetch() {
	# Fetch every SRC_URI entry, then verify checksums against
	# conf/checksums.ini (when present).
	import sys

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata, 1)
	if not src_uri:
		return 1

	try:
		bb.fetch.init(src_uri.split(),d)
	except bb.fetch.NoMethodError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("No method: %s" % value)

	try:
		bb.fetch.go(localdata)
	except bb.fetch.MissingParameterError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Missing parameters: %s" % value)
	except bb.fetch.FetchError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Fetch failed: %s" % value)
	except bb.fetch.MD5SumError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("MD5  failed: %s" % value)
	except:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)

	# Verify the SHA and MD5 sums we have in OE and check what do
	# in
	check_sum = bb.which(bb.data.getVar('BBPATH', d, True), "conf/checksums.ini")
	if not check_sum:
		bb.note("No conf/checksums.ini found, not checking checksums")
		return

	try:
		parser = base_chk_load_parser(check_sum)
	except:
		bb.note("Creating the CheckSum parser failed")
		return

	pv = bb.data.getVar('PV', d, True)
	pn = bb.data.getVar('PN', d, True)

	# Check each URI
	for url in src_uri.split():
		localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
		(type,host,path,_,_,_) = bb.decodeurl(url)
		uri = "%s://%s%s" % (type,host,path)
		try:
			# only remote downloads carry checksums; local files are skipped
			if type == "http" or type == "https" or type == "ftp" or type == "ftps":
				if not base_chk_file(parser, pn, pv,uri, localpath, d):
					bb.note("%s-%s: %s has no entry in conf/checksums.ini, not checking URI" % (pn,pv,uri))
		except Exception:
			raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
}
2006-11-20 12:51:14 +00:00
addtask fetchall after do_fetch
2006-10-16 23:19:19 +00:00
do_fetchall[recrdeptask] = "do_fetch"
2006-11-20 12:51:14 +00:00
base_do_fetchall() {
:
2006-10-16 23:19:19 +00:00
}
2008-04-27 11:28:34 +00:00
addtask checkuri
do_checkuri[nostamp] = "1"
python do_checkuri() {
	# Check that every SRC_URI entry is reachable, without downloading.
	import sys

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata, 1)

	try:
		bb.fetch.init(src_uri.split(),d)
	except bb.fetch.NoMethodError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("No method: %s" % value)

	try:
		bb.fetch.checkstatus(localdata)
	except bb.fetch.MissingParameterError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Missing parameters: %s" % value)
	except bb.fetch.FetchError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Fetch failed: %s" % value)
	except bb.fetch.MD5SumError:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("MD5  failed: %s" % value)
	except:
		(type, value, traceback) = sys.exc_info()
		raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
}
addtask checkuriall after do_checkuri
do_checkuriall[recrdeptask] = "do_checkuri"
do_checkuriall[nostamp] = "1"
base_do_checkuriall() {
:
}
2007-11-25 14:07:17 +00:00
addtask buildall after do_build
do_buildall[recrdeptask] = "do_build"
base_do_buildall() {
:
}
2009-08-25 15:37:50 +00:00
def subprocess_setup():
    """preexec_fn for subprocess: restore default SIGPIPE handling.

    Python installs a SIGPIPE handler by default. This is usually not
    what non-Python subprocesses expect; SIGPIPE errors are known
    issues with gzip/bash.
    """
    import signal
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
2009-08-25 15:37:50 +00:00
2005-08-31 10:45:47 +00:00
def oe_unpack_file(file, data, url = None):
    """Unpack a single fetched *file* into the current directory.

    Picks an unpack command by file extension (tar/gzip/bzip2/zip...),
    copies plain files and directories, honours the 'subdir' and 'dos'
    URL parameters, and runs the command with the PATH from the
    datastore.  Returns True on success (or nothing to do), False when
    the unpack command fails.
    """
    import subprocess

    if not url:
        url = "file://%s" % file

    # target name with the compression extension stripped (for gz/bz2/Z)
    dots = file.split(".")
    if dots[-1] in ['gz', 'bz2', 'Z']:
        efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
    else:
        efile = file
    cmd = None
    if file.endswith('.tar'):
        cmd = 'tar x --no-same-owner -f %s' % file
    elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
        cmd = 'tar xz --no-same-owner -f %s' % file
    elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
        cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
    elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
        cmd = 'gzip -dc %s > %s' % (file, efile)
    elif file.endswith('.bz2'):
        cmd = 'bzip2 -dc %s > %s' % (file, efile)
    elif file.endswith('.zip') or file.endswith('.jar'):
        cmd = 'unzip -q -o'
        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        # 'dos' parameter: convert text files to unix line endings
        if 'dos' in parm:
            cmd = '%s -a' % cmd
        cmd = "%s '%s'" % (cmd, file)
    elif os.path.isdir(file):
        # directory from FILESDIR: preserve its FILESDIR-relative path
        filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, 1))
        destdir = "."
        if file[0:len(filesdir)] == filesdir:
            destdir = file[len(filesdir):file.rfind('/')]
            destdir = destdir.strip('/')
            if len(destdir) < 1:
                destdir = "."
            elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
                os.makedirs("%s/%s" % (os.getcwd(), destdir))
        cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
    else:
        (type, host, path, user, pswd, parm) = bb.decodeurl(url)
        if not 'patch' in parm:
            # The "destdir" handling was specifically done for FILESPATH
            # items. So, only do so for file:// entries.
            if type == "file":
                destdir = bb.decodeurl(url)[1] or "."
            else:
                destdir = "."
            bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
            cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)

    if not cmd:
        return True

    # avoid copying a file onto itself
    dest = os.path.join(os.getcwd(), os.path.basename(file))
    if os.path.exists(dest):
        if os.path.samefile(file, dest):
            return True

    # Change to subdir before executing command
    save_cwd = os.getcwd();
    parm = bb.decodeurl(url)[5]
    if 'subdir' in parm:
        newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
        bb.mkdirhier(newdir)
        os.chdir(newdir)

    cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
    bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
    ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

    os.chdir(save_cwd)

    return ret == 0
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
	# Unpack every SRC_URI entry via oe_unpack_file().
	import re

	localdata = bb.data.createCopy(d)
	bb.data.update_data(localdata)

	src_uri = bb.data.getVar('SRC_URI', localdata, True)
	if not src_uri:
		return

	for url in src_uri.split():
		try:
			local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
		except bb.MalformedUrl, e:
			raise FuncFailed('Unable to generate local path for malformed uri: %s' % e)
		# resolve symlinks so samefile checks in oe_unpack_file work
		local = os.path.realpath(local)
		ret = oe_unpack_file(local, localdata, url)
		if not ret:
			raise bb.build.FuncFailed()
}
2009-12-17 20:35:08 +00:00
METADATA_BRANCH ?= "${@base_detect_branch(d)}"
METADATA_REVISION ?= "${@base_detect_revision(d)}"
def base_detect_revision(d):
    """Return the SCM revision of the metadata checkout (git first,
    then svn), or "<unknown>" when no SCM recognises the path."""
    path = base_get_scmbasepath(d)
    detectors = [base_get_metadata_git_revision,
                 base_get_metadata_svn_revision]
    for detect in detectors:
        rev = detect(path, d)
        if rev != "<unknown>":
            return rev
    return "<unknown>"
def base_detect_branch(d):
    """Return the SCM branch of the metadata checkout (git only),
    stripped of whitespace, or "<unknown>"."""
    path = base_get_scmbasepath(d)
    for detect in [base_get_metadata_git_branch]:
        branch = detect(path, d)
        if branch != "<unknown>":
            return branch.strip()
    return "<unknown>"
2008-02-15 15:23:45 +00:00
def base_get_scmbasepath(d):
    """Return the checkout root: the part of the first BBFILES entry up
    to its "packages" component.

    NOTE(review): raises ValueError if the first BBFILES entry contains
    no "packages" substring — assumed not to happen in this layout.
    """
    path_to_bbfiles = bb.data.getVar( 'BBFILES', d, 1 ).split()
    return path_to_bbfiles[0][:path_to_bbfiles[0].rindex( "packages" )]
2009-12-17 20:35:08 +00:00
def base_get_metadata_monotone_branch(path, d):
    """Return the monotone branch of the checkout at *path*, parsed from
    _MTN/options, or "<unknown>" on any failure."""
    monotone_branch = "<unknown>"
    try:
        monotone_branch = file( "%s/_MTN/options" % path ).read().strip()
        if monotone_branch.startswith( "database" ):
            # options file format: the word after "branch" is the quoted name
            monotone_branch_words = monotone_branch.split()
            monotone_branch = monotone_branch_words[ monotone_branch_words.index( "branch" )+1][1:-1]
    except:
        # best-effort: any parse/IO error yields "<unknown>"
        pass
    return monotone_branch
2009-12-17 20:35:08 +00:00
def base_get_metadata_monotone_revision(path, d):
    """Return the monotone revision of the checkout at *path*, parsed
    from _MTN/revision, or "<unknown>" when unreadable."""
    monotone_revision = "<unknown>"
    try:
        monotone_revision = file( "%s/_MTN/revision" % path ).read().strip()
        if monotone_revision.startswith( "format_version" ):
            # revision file format: the word after "old_revision" is quoted
            monotone_revision_words = monotone_revision.split()
            monotone_revision = monotone_revision_words[ monotone_revision_words.index( "old_revision" )+1][1:-1]
    except IOError:
        pass
    return monotone_revision
2009-12-17 20:35:08 +00:00
def base_get_metadata_svn_revision(path, d):
    """Return the svn revision of the checkout at *path* (4th line of
    .svn/entries), or "<unknown>" when unreadable."""
    revision = "<unknown>"
    try:
        revision = file( "%s/.svn/entries" % path ).readlines()[3].strip()
    except IOError:
        pass
    return revision
2009-12-17 20:35:08 +00:00
def base_get_metadata_git_branch(path, d):
    """Return the current git branch of the checkout at *path* (output
    of `git branch`'s starred line), or "<unknown>"."""
    branch = os.popen('cd %s; git branch | grep "^* " | tr -d "* "' % path).read()

    if len(branch) != 0:
        return branch
    return "<unknown>"
2009-12-18 12:13:32 +00:00
def base_get_metadata_git_revision(path, d):
    """Return the HEAD commit hash of the git checkout at *path*, or
    "<unknown>" when git produces no output."""
    rev = os.popen("cd %s; git log -n 1 --pretty=oneline --" % path).read().split(" ")[0]
    if len(rev) != 0:
        return rev
    return "<unknown>"
2008-11-04 13:32:35 +00:00
GIT_CONFIG = "${STAGING_DIR_NATIVE}/usr/etc/gitconfig"
2008-10-30 17:42:01 +00:00
def generate_git_config(e):
    """Write the gitconfig at GIT_CONFIG with proxy settings.

    Only acts when GIT_CORE_CONFIG is set: GIT_PROXY_COMMAND becomes
    the default gitproxy and every host in GIT_PROXY_IGNORE gets a
    "gitproxy = none" override.

    Bug fix: the original ended with "f.close" (attribute access, no
    call), so the file object was never explicitly closed; the handle
    is now closed even if a write fails.
    """
    from bb import data
    if data.getVar('GIT_CORE_CONFIG', e.data, True):
        gitconfig_path = bb.data.getVar('GIT_CONFIG', e.data, True)
        proxy_command = " gitproxy = %s\n" % data.getVar('GIT_PROXY_COMMAND', e.data, True)

        bb.mkdirhier(bb.data.expand("${STAGING_DIR_NATIVE}/usr/etc/", e.data))
        if (os.path.exists(gitconfig_path)):
            os.remove(gitconfig_path)

        f = open(gitconfig_path, 'w')
        try:
            f.write("[core]\n")
            ignore_hosts = data.getVar('GIT_PROXY_IGNORE', e.data, True).split()
            for ignore_host in ignore_hosts:
                f.write(" gitproxy = none for %s\n" % ignore_host)
            f.write(proxy_command)
        finally:
            f.close()
2008-10-30 17:42:01 +00:00
2005-08-31 10:45:47 +00:00
addhandler base_eventhandler
python base_eventhandler() {
	# Central event handler: log Pkg/Task/Build events, print the build
	# configuration banner on BuildStarted, remove stamps for do_rebuild,
	# trigger gitconfig generation on ConfigParsed, and append to EVENTLOG.
	from bb import note, error, data
	from bb.event import Handled, NotHandled, getName

	messages = {}
	messages["Completed"] = "completed"
	messages["Succeeded"] = "completed"
	messages["Started"] = "started"
	messages["Failed"] = "failed"

	name = getName(e)
	msg = ""
	if name.startswith("Pkg"):
		msg += "package %s: " % data.getVar("P", e.data, 1)
		msg += messages.get(name[3:]) or name[3:]
	elif name.startswith("Task"):
		msg += "package %s: task %s: " % (data.getVar("PF", e.data, 1), e.task)
		msg += messages.get(name[4:]) or name[4:]
	elif name.startswith("Build"):
		msg += "build %s: " % e.name
		msg += messages.get(name[5:]) or name[5:]
	elif name == "UnsatisfiedDep":
		msg += "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())

	# Only need to output when using 1.8 or lower, the UI code handles it
	# otherwise
	if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
		if msg:
			note(msg)

	if name.startswith("BuildStarted"):
		bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
		statusvars = ['BB_VERSION', 'METADATA_BRANCH', 'METADATA_REVISION', 'TARGET_ARCH', 'TARGET_OS', 'MACHINE', 'DISTRO', 'DISTRO_VERSION','TARGET_FPU']
		statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
		statusmsg = "\nOE Build Configuration:\n%s\n" % '\n'.join(statuslines)
		print statusmsg

		# abort early when essential variables are unset
		needed_vars = [ "TARGET_ARCH", "TARGET_OS" ]
		pesteruser = []
		for v in needed_vars:
			val = bb.data.getVar(v, e.data, 1)
			if not val or val == 'INVALID':
				pesteruser.append(v)
		if pesteruser:
			bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))

	#
	# Handle removing stamps for 'rebuild' task
	#
	if name.startswith("StampUpdate"):
		for (fn, task) in e.targets:
			#print "%s %s" % (task, fn)
			if task == "do_rebuild":
				dir = "%s.*" % e.stampPrefix[fn]
				bb.note("Removing stamps: " + dir)
				os.system('rm -f '+ dir)
				# marker picked up by base_scenefunction to run do_clean
				os.system('touch ' + e.stampPrefix[fn] + '.needclean')

	if name == "ConfigParsed":
		generate_git_config(e)

	if not data in e.__dict__:
		return NotHandled

	log = data.getVar("EVENTLOG", e.data, 1)
	if log:
		logfile = file(log, "a")
		logfile.write("%s\n" % msg)
		logfile.close()

	return NotHandled
}
addtask configure after do_unpack do_patch
do_configure[dirs] = "${S} ${B}"
2009-11-10 14:55:23 +00:00
do_configure[deptask] = "do_populate_sysroot"
2005-08-31 10:45:47 +00:00
base_do_configure() {
:
}
addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
# Default compile: run make when a Makefile exists, otherwise do nothing
base_do_compile() {
	if [ -e Makefile -o -e makefile ]; then
		oe_runmake || die "make failed"
	else
		oenote "nothing to compile"
	fi
}
2009-11-02 17:10:51 +00:00
# sysroot_stage_dir <src> <dest>: copy the contents of src into dest,
# preserving attributes; empty or missing src directories are ignored
sysroot_stage_dir() {
	src="$1"
	dest="$2"
	# This will remove empty directories so we can ignore them
	rmdir "$src" 2> /dev/null || true
	if [ -d "$src" ]; then
		mkdir -p "$dest"
		cp -fpPR "$src"/* "$dest"
	fi
}
# sysroot_stage_libdir <src> <dest>: rewrite all libtool .la files in src
# so their paths point into staging, then stage the directory
sysroot_stage_libdir() {
	src="$1"
	dest="$2"

	olddir=`pwd`
	cd $src
	las=$(find . -name \*.la -type f)
	cd $olddir
	echo "Found la files: $las"
	for i in $las
	do
		# mark as not installed and redirect WORKDIR/libdir paths
		# in dependency_libs to STAGING_LIBDIR
		sed -e 's/^installed=yes$/installed=no/' \
			-e '/^dependency_libs=/s,${WORKDIR}[[:alnum:]/\._+-]*/\([[:alnum:]\._+-]*\),${STAGING_LIBDIR}/\1,g' \
			-e "/^dependency_libs=/s,\([[:space:]']\)${libdir},\1${STAGING_LIBDIR},g" \
			-i $src/$i
	done
	sysroot_stage_dir $src $dest
}
sysroot_stage_dirs() {
from="$1"
to="$2"
sysroot_stage_dir $from${includedir} $to${STAGING_INCDIR}
if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then
sysroot_stage_dir $from${bindir} $to${STAGING_DIR_HOST}${bindir}
sysroot_stage_dir $from${sbindir} $to${STAGING_DIR_HOST}${sbindir}
sysroot_stage_dir $from${base_bindir} $to${STAGING_DIR_HOST}${base_bindir}
sysroot_stage_dir $from${base_sbindir} $to${STAGING_DIR_HOST}${base_sbindir}
sysroot_stage_dir $from${libexecdir} $to${STAGING_DIR_HOST}${libexecdir}
2009-11-30 15:11:22 +00:00
sysroot_stage_dir $from${sysconfdir} $to${STAGING_DIR_HOST}${sysconfdir}
2009-11-02 17:10:51 +00:00
fi
if [ -d $from${libdir} ]
then
sysroot_stage_libdir $from/${libdir} $to${STAGING_LIBDIR}
fi
if [ -d $from${base_libdir} ]
then
sysroot_stage_libdir $from${base_libdir} $to${STAGING_DIR_HOST}${base_libdir}
fi
sysroot_stage_dir $from${datadir} $to${STAGING_DATADIR}
}
sysroot_stage_all() {
sysroot_stage_dirs ${D} ${SYSROOT_DESTDIR}
}
2009-11-02 20:43:22 +00:00
def is_legacy_staging(d):
    """Return True when the recipe still provides a hand-written
    do_stage function, i.e. needs the legacy staging code path."""
    stagefunc = bb.data.getVar('do_stage', d, True)
    if stagefunc is None:
        return False
    if stagefunc.strip() == "use_do_install_for_stage":
        return False
    if stagefunc.strip() == "autotools_stage_all":
        return False
    if stagefunc.strip() == "do_stage_native" and bb.data.getVar('AUTOTOOLS_NATIVE_STAGE_INSTALL', d, 1) == "1":
        return False
    if bb.data.getVar('NATIVE_INSTALL_WORKS', d, 1) == "1":
        return False
    return True
2005-08-31 10:45:47 +00:00
2009-11-10 14:55:23 +00:00
do_populate_sysroot[dirs] = "${STAGING_DIR_TARGET}/${bindir} ${STAGING_DIR_TARGET}/${libdir} \
2009-09-16 22:09:44 +00:00
${STAGING_DIR_TARGET}/${includedir} \
2007-10-29 15:16:19 +00:00
${STAGING_BINDIR_NATIVE} ${STAGING_LIBDIR_NATIVE} \
${STAGING_INCDIR_NATIVE} \
2005-08-31 10:45:47 +00:00
${STAGING_DATADIR} \
${S} ${B}"
2009-11-10 14:55:23 +00:00
# Could be compile but populate_sysroot and do_install shouldn't run at the same time
addtask populate_sysroot after do_install
2005-08-31 10:45:47 +00:00
2009-11-02 20:43:22 +00:00
PSTAGING_ACTIVE = "0"
2009-11-02 17:27:05 +00:00
SYSROOT_PREPROCESS_FUNCS ?= ""
SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir/"
2009-11-02 17:38:44 +00:00
SYSROOT_LOCK = "${STAGING_DIR}/staging.lock"
2009-11-02 17:27:05 +00:00
2009-11-10 14:55:23 +00:00
python populate_sysroot_prehook () {
2009-11-02 20:43:22 +00:00
return
}
2009-11-10 14:55:23 +00:00
python populate_sysroot_posthook () {
2009-11-02 20:43:22 +00:00
return
}
2009-11-03 22:02:34 +00:00
packagedstaging_fastpath () {
2009-11-02 20:43:22 +00:00
:
}
2009-11-10 14:55:23 +00:00
python do_populate_sysroot () {
	#
	# if do_stage exists, we're legacy. In that case run the do_stage,
	# modify the SYSROOT_DESTDIR variable and then run the staging preprocess
	# functions against staging directly.
	#
	# Otherwise setup a destdir, copy the results from do_install
	# and run the staging preprocess against that
	#
	pstageactive = (bb.data.getVar("PSTAGING_ACTIVE", d, True) == "1")
	lockfile = bb.data.getVar("SYSROOT_LOCK", d, True)
	stagefunc = bb.data.getVar('do_stage', d, True)
	legacy = is_legacy_staging(d)
	if legacy:
		# legacy path: stage directly into the shared staging area,
		# serialised via SYSROOT_LOCK
		bb.data.setVar("SYSROOT_DESTDIR", "", d)
		bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
		lock = bb.utils.lockfile(lockfile)
		bb.build.exec_func('populate_sysroot_prehook', d)
		bb.build.exec_func('do_stage', d)
		for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
			bb.build.exec_func(f, d)
		bb.build.exec_func('populate_sysroot_posthook', d)
		bb.utils.unlockfile(lock)
	else:
		# modern path: assemble the sysroot in SYSROOT_DESTDIR, then
		# copy it into the shared area under the lock
		dest = bb.data.getVar('D', d, True)
		sysrootdest = bb.data.expand('${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}', d)
		bb.mkdirhier(sysrootdest)

		bb.build.exec_func("sysroot_stage_all", d)
		#os.system('cp -pPR %s/* %s/' % (dest, sysrootdest))
		for f in (bb.data.getVar('SYSROOT_PREPROCESS_FUNCS', d, True) or '').split():
			bb.build.exec_func(f, d)
		bb.build.exec_func("packagedstaging_fastpath", d)

		lock = bb.utils.lockfile(lockfile)
		os.system(bb.data.expand('cp -pPR ${SYSROOT_DESTDIR}${TMPDIR}/* ${TMPDIR}/', d))
		bb.utils.unlockfile(lock)
}
addtask install after do_compile
2006-09-18 20:31:39 +00:00
# Directories created (and cd'd into) before do_install runs.
do_install[dirs] = "${D} ${S} ${B}"
2007-10-29 15:20:25 +00:00
# Remove and re-create ${D} so that it is guaranteed to be empty
2007-09-05 08:41:10 +00:00
do_install[cleandirs] = "${D}"
2005-08-31 10:45:47 +00:00
# Default no-op install; exposed via EXPORT_FUNCTIONS below so recipes can
# override do_install with their own implementation.
base_do_install() {
	:
}
# Default no-op packaging step; overridden via EXPORT_FUNCTIONS / packaging
# classes.
base_do_package() {
	:
}
2009-11-10 14:55:23 +00:00
# 'build' (the BB_DEFAULT_TASK, see top of file) has an empty body and exists
# only to anchor the dependency chain ending at do_populate_sysroot.
addtask build after do_populate_sysroot
2005-08-31 10:45:47 +00:00
do_build = ""
do_build[func] = "1"
2008-03-03 22:58:30 +00:00
# Make sure MACHINE isn't exported
# (breaks binutils at least)
MACHINE[unexport] = "1"
# Make sure TARGET_ARCH isn't exported
# (breaks Makefiles using implicit rules, e.g. quilt, as GNU make has this
# in them, undocumented)
TARGET_ARCH[unexport] = "1"
# Make sure DISTRO isn't exported
# (breaks sysvinit at least)
DISTRO[unexport] = "1"
2007-08-12 11:36:37 +00:00
def base_after_parse(d):
    """Post-parse fixups for every recipe.

    - Skips recipes incompatible with the current host/machine (unless we
      are only doing a source-mirror fetch).
    - Applies obsolete per-PN SRCDATE/USE_NLS overrides.
    - Adds do_fetch dependencies on the native tools needed by the
      fetcher(s) named in SRC_URI.
    - Performs 'multimachine' PACKAGE_ARCH/MULTIMACH_ARCH handling.

    Raises bb.parse.SkipPackage for incompatible recipes.
    """
    import re

    source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
    if not source_mirror_fetch:
        need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
        if need_host:
            this_host = bb.data.getVar('HOST_SYS', d, 1)
            if not re.match(need_host, this_host):
                raise bb.parse.SkipPackage("incompatible with host %s" % this_host)

        need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
        if need_machine:
            this_machine = bb.data.getVar('MACHINE', d, 1)
            if this_machine and not re.match(need_machine, this_machine):
                raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)

    pn = bb.data.getVar('PN', d, 1)

    # OBSOLETE in bitbake 1.7.4: per-recipe SRCDATE/USE_NLS overrides.
    srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
    if srcdate != None:
        bb.data.setVar('SRCDATE', srcdate, d)
    use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
    if use_nls != None:
        bb.data.setVar('USE_NLS', use_nls, d)

    def _add_fetch_depends(dep):
        # Append a task dependency (e.g. "git-native:do_populate_sysroot")
        # to do_fetch's 'depends' varflag.
        depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
        bb.data.setVarFlag('do_fetch', 'depends', depends + " " + dep, d)

    # SCM-backed packages need the matching native fetcher tool staged
    # before do_fetch can run.
    srcuri = bb.data.getVar('SRC_URI', d, 1)
    if "git://" in srcuri:
        _add_fetch_depends("git-native:do_populate_sysroot")
    elif "hg://" in srcuri:
        _add_fetch_depends("mercurial-native:do_populate_sysroot")
    elif "osc://" in srcuri:
        _add_fetch_depends("osc-native:do_populate_sysroot")

    # bb.utils.sha256_file() will fail if hashlib isn't present (hashlib is
    # Python 2.5+), so we fall back on shasum-native and must ensure it is
    # staged before we fetch.
    if bb.data.getVar('PN', d, True) != "shasum-native":
        try:
            import hashlib
        except ImportError:
            _add_fetch_depends("shasum-native:do_populate_sysroot")

    # 'multimachine' handling
    mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
    pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
    if (pkg_arch == mach_arch):
        # Already machine specific - nothing further to do
        return

    #
    # We always try to scan SRC_URI for urls with machine overrides
    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
    #
    override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
    if override != '0':
        # Collect the machine-specific file directories that exist for this
        # recipe; any file:// URI resolved from one of them makes the
        # package machine specific.
        paths = []
        for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
            path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
            if os.path.isdir(path):
                paths.append(path)
        if len(paths) != 0:
            for s in srcuri.split():
                if not s.startswith("file://"):
                    continue
                local = bb.data.expand(bb.fetch.localpath(s, d), d)
                for mp in paths:
                    if local.startswith(mp):
                        #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
                        bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
                        bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
                        return

    # If any split-off package is machine specific, reflect that in
    # MULTIMACH_ARCH even though the main PACKAGE_ARCH stays generic.
    multiarch = pkg_arch
    packages = bb.data.getVar('PACKAGES', d, 1).split()
    for pkg in packages:
        pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
        # We could look for != PACKAGE_ARCH here but how to choose
        # if multiple differences are present?
        # Look through PACKAGE_ARCHS for the priority order?
        if pkgarch and pkgarch == mach_arch:
            multiarch = mach_arch
            break
    bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
2005-08-31 10:45:47 +00:00
python () {
    # Anonymous function: runs at parse time for every recipe using this
    # class.  Apply the post-parse fixups, then flag recipes that still use
    # the legacy do_stage mechanism.
    base_after_parse(d)
    if is_legacy_staging(d):
        bb.note("Legacy staging mode for %s" % bb.data.getVar("FILE", d, True))
}
2008-01-03 14:07:44 +00:00
def check_app_exists(app, d):
    """Return True if the (variable-expanded) program *app* is found on PATH."""
    from bb import which, data
    expanded_app = data.expand(app, d)
    search_path = data.getVar('PATH', d, 1)
    return len(which(search_path, expanded_app)) != 0
def check_gcc3(data):
    """Return the name of the first gcc 3.x executable found on PATH,
    or False if none of the known versions is available."""
    candidates = (
        'gcc-3.4.6', 'gcc-3.4.7', 'gcc-3.4', 'gcc34', 'gcc-3.4.4',
        'gcc-3.3', 'gcc33', 'gcc-3.3.6', 'gcc-3.2', 'gcc32',
    )
    for candidate in candidates:
        if check_app_exists(candidate, data):
            return candidate
    return False
2006-08-21 00:50:19 +00:00
# Patch handling
inherit patch
2006-11-21 14:34:40 +00:00
# Configuration data from site files
# Move to autotools.bbclass?
inherit siteinfo
2009-11-02 20:43:22 +00:00
# Wire the base_do_* implementations above up as the default do_* tasks,
# while letting recipes/classes override them.
EXPORT_FUNCTIONS do_setscene do_clean do_fetch do_unpack do_configure do_compile do_install do_package do_populate_pkgs do_rebuild do_fetchall
2005-08-31 10:45:47 +00:00
# MIRRORS is a data table (func flag cleared), not an executable function:
# each line maps an upstream URI prefix to an alternative mirror the fetcher
# may try.  Do not add comments inside the body — every line is parsed as an
# entry.
MIRRORS[func] = "0"
MIRRORS () {
${DEBIAN_MIRROR}/main http://snapshot.debian.net/archive/pool
${DEBIAN_MIRROR} ftp://ftp.de.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.au.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.cl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hr.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.fi.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hk.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.hu.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ie.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.it.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.jp.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.no.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.pl.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.ro.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.si.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.es.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.se.debian.org/debian/pool
${DEBIAN_MIRROR} ftp://ftp.tr.debian.org/debian/pool
${GNU_MIRROR} ftp://mirrors.kernel.org/gnu
${GNU_MIRROR} ftp://ftp.matrix.com.br/pub/gnu
${GNU_MIRROR} ftp://ftp.cs.ubc.ca/mirror2/gnu
${GNU_MIRROR} ftp://sunsite.ust.hk/pub/gnu
${GNU_MIRROR} ftp://ftp.ayamura.org/pub/gnu
2007-08-08 21:04:28 +00:00
${KERNELORG_MIRROR} http://www.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.us.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.uk.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.hk.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.au.kernel.org/pub
${KERNELORG_MIRROR} ftp://ftp.jp.kernel.org/pub
2006-05-27 21:49:50 +00:00
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.franken.de/pub/crypt/mirror/ftp.gnupg.org/gcrypt/
ftp://ftp.gnupg.org/gcrypt/ ftp://ftp.surfnet.nl/pub/security/gnupg/
ftp://ftp.gnupg.org/gcrypt/ http://gulus.USherbrooke.ca/pub/appl/GnuPG/
2006-08-27 16:01:33 +00:00
ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN
ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/
ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/
2006-06-12 20:29:38 +00:00
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnutls.org/pub/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.gnupg.org/gcrypt/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://www.mirrors.wiretapped.net/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls ftp://ftp.mirrors.wiretapped.net/pub/security/network-security/gnutls/
ftp://ftp.gnutls.org/pub/gnutls http://josefsson.org/gnutls/releases/
2006-11-20 12:51:14 +00:00
http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/
http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/
2007-08-08 21:04:28 +00:00
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tau.ac.il/pub/unix/admin/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cert.dfn.de/pub/tools/admin/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.fu-berlin.de/pub/unix/tools/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.kaizo.org/pub/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tu-darmstadt.de/pub/sysadmin/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.tux.org/pub/sites/vic.cc.purdue.edu/tools/unix/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://gd.tuwien.ac.at/utils/admin-tools/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://sunsite.ualberta.ca/pub/Mirror/lsof/
ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://the.wiretapped.net/pub/security/host-security/lsof/
http://www.apache.org/dist http://archive.apache.org/dist
2005-08-31 10:45:47 +00:00
}
2006-08-27 16:01:33 +00:00