Switch bitbake internals to use logging directly rather than bb.msg
We use a custom Logger subclass for our loggers. This logger provides: - a 'debug' method which accepts a debug level - a 'plain' method which bypasses log formatting - a 'verbose' method which is more detailed than info, but less than debug (Bitbake rev: 3b2c1fe5ca56daebb24073a9dd45723d3efd2a8d) Signed-off-by: Chris Larson <chris_larson@mentor.com> Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
This commit is contained in:
parent
d3a45c7d41
commit
ecc68fa4fb
|
@ -57,23 +57,23 @@ class BBConfiguration(object):
|
||||||
self.pkgs_to_build = []
|
self.pkgs_to_build = []
|
||||||
|
|
||||||
|
|
||||||
def print_exception(exc, value, tb):
|
def print_exception(*exc_info):
|
||||||
"""Send exception information through bb.msg"""
|
logger.error("Uncaught exception: ", exc_info=exc_info)
|
||||||
bb.fatal("".join(format_exception(exc, value, tb, limit=8)))
|
sys.exit(1)
|
||||||
|
|
||||||
sys.excepthook = print_exception
|
sys.excepthook = print_exception
|
||||||
|
|
||||||
|
|
||||||
|
# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
|
||||||
|
warnlog = logging.getLogger("BitBake.Warnings")
|
||||||
_warnings_showwarning = warnings.showwarning
|
_warnings_showwarning = warnings.showwarning
|
||||||
def _showwarning(message, category, filename, lineno, file=None, line=None):
|
def _showwarning(message, category, filename, lineno, file=None, line=None):
|
||||||
"""Display python warning messages using bb.msg"""
|
|
||||||
if file is not None:
|
if file is not None:
|
||||||
if _warnings_showwarning is not None:
|
if _warnings_showwarning is not None:
|
||||||
_warnings_showwarning(message, category, filename, lineno, file, line)
|
_warnings_showwarning(message, category, filename, lineno, file, line)
|
||||||
else:
|
else:
|
||||||
s = warnings.formatwarning(message, category, filename, lineno)
|
s = warnings.formatwarning(message, category, filename, lineno)
|
||||||
s = s.split("\n")[0]
|
warnlog.warn(s)
|
||||||
bb.msg.warn(None, s)
|
|
||||||
|
|
||||||
warnings.showwarning = _showwarning
|
warnings.showwarning = _showwarning
|
||||||
warnings.filterwarnings("ignore")
|
warnings.filterwarnings("ignore")
|
||||||
|
|
|
@ -35,7 +35,25 @@ class NullHandler(logging.Handler):
|
||||||
def emit(self, record):
|
def emit(self, record):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
Logger = logging.getLoggerClass()
|
||||||
|
class BBLogger(Logger):
|
||||||
|
def __init__(self, name):
|
||||||
|
if name.split(".")[0] == "BitBake":
|
||||||
|
self.debug = self.bbdebug
|
||||||
|
Logger.__init__(self, name)
|
||||||
|
|
||||||
|
def bbdebug(self, level, msg, *args, **kwargs):
|
||||||
|
return self.log(logging.DEBUG - level - 1, msg, *args, **kwargs)
|
||||||
|
|
||||||
|
def plain(self, msg, *args, **kwargs):
|
||||||
|
return self.log(logging.INFO + 1, msg, *args, **kwargs)
|
||||||
|
|
||||||
|
def verbose(self, msg, *args, **kwargs):
|
||||||
|
return self.log(logging.INFO - 1, msg, *args, **kwargs)
|
||||||
|
|
||||||
logging.raiseExceptions = False
|
logging.raiseExceptions = False
|
||||||
|
logging.setLoggerClass(BBLogger)
|
||||||
|
|
||||||
logger = logging.getLogger("BitBake")
|
logger = logging.getLogger("BitBake")
|
||||||
logger.addHandler(NullHandler())
|
logger.addHandler(NullHandler())
|
||||||
logger.setLevel(logging.INFO)
|
logger.setLevel(logging.INFO)
|
||||||
|
@ -48,22 +66,23 @@ if "BBDEBUG" in os.environ:
|
||||||
|
|
||||||
# Messaging convenience functions
|
# Messaging convenience functions
|
||||||
def plain(*args):
|
def plain(*args):
|
||||||
bb.msg.plain(''.join(args))
|
logger.plain(''.join(args))
|
||||||
|
|
||||||
def debug(lvl, *args):
|
def debug(lvl, *args):
|
||||||
bb.msg.debug(lvl, None, ''.join(args))
|
logger.debug(lvl, ''.join(args))
|
||||||
|
|
||||||
def note(*args):
|
def note(*args):
|
||||||
bb.msg.note(1, None, ''.join(args))
|
logger.info(''.join(args))
|
||||||
|
|
||||||
def warn(*args):
|
def warn(*args):
|
||||||
bb.msg.warn(None, ''.join(args))
|
logger.warn(''.join(args))
|
||||||
|
|
||||||
def error(*args):
|
def error(*args):
|
||||||
bb.msg.error(None, ''.join(args))
|
logger.error(''.join(args))
|
||||||
|
|
||||||
def fatal(*args):
|
def fatal(*args):
|
||||||
bb.msg.fatal(None, ''.join(args))
|
logger.critical(''.join(args))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
def deprecated(func, name = None, advice = ""):
|
def deprecated(func, name = None, advice = ""):
|
||||||
|
|
|
@ -26,9 +26,14 @@
|
||||||
#Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
#Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
from bb import data, event, mkdirhier, utils
|
from bb import data, event, mkdirhier, utils
|
||||||
import bb, os, sys
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
import bb.utils
|
import bb.utils
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Build")
|
||||||
|
|
||||||
# When we execute a python function we'd like certain things
|
# When we execute a python function we'd like certain things
|
||||||
# in all namespaces, hence we add them to __builtins__
|
# in all namespaces, hence we add them to __builtins__
|
||||||
# If we do not do this and use the exec globals, they will
|
# If we do not do this and use the exec globals, they will
|
||||||
|
@ -193,7 +198,8 @@ def exec_func_shell(func, d, runfile, logfile, flags):
|
||||||
|
|
||||||
f = open(runfile, "w")
|
f = open(runfile, "w")
|
||||||
f.write("#!/bin/sh -e\n")
|
f.write("#!/bin/sh -e\n")
|
||||||
if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
|
if logger.getEffectiveLevel() <= logging.DEBUG:
|
||||||
|
f.write("set -x\n")
|
||||||
data.emit_func(func, f, d)
|
data.emit_func(func, f, d)
|
||||||
|
|
||||||
f.write("cd %s\n" % os.getcwd())
|
f.write("cd %s\n" % os.getcwd())
|
||||||
|
@ -226,7 +232,7 @@ def exec_task(fn, task, d):
|
||||||
# Check whether this is a valid task
|
# Check whether this is a valid task
|
||||||
if not data.getVarFlag(task, 'task', d):
|
if not data.getVarFlag(task, 'task', d):
|
||||||
event.fire(TaskInvalid(task, d), d)
|
event.fire(TaskInvalid(task, d), d)
|
||||||
bb.msg.error(bb.msg.domain.Build, "No such task: %s" % task)
|
logger.error("No such task: %s" % task)
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
quieterr = False
|
quieterr = False
|
||||||
|
@ -234,7 +240,7 @@ def exec_task(fn, task, d):
|
||||||
quieterr = True
|
quieterr = True
|
||||||
|
|
||||||
try:
|
try:
|
||||||
bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
|
logger.debug(1, "Executing task %s", task)
|
||||||
old_overrides = data.getVar('OVERRIDES', d, 0)
|
old_overrides = data.getVar('OVERRIDES', d, 0)
|
||||||
localdata = data.createCopy(d)
|
localdata = data.createCopy(d)
|
||||||
data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
|
data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
|
||||||
|
@ -269,8 +275,8 @@ def exec_task(fn, task, d):
|
||||||
si = file('/dev/null', 'r')
|
si = file('/dev/null', 'r')
|
||||||
try:
|
try:
|
||||||
so = file(logfile, 'w')
|
so = file(logfile, 'w')
|
||||||
except OSError as e:
|
except OSError:
|
||||||
bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
|
logger.exception("Opening log file '%s'", logfile)
|
||||||
pass
|
pass
|
||||||
se = so
|
se = so
|
||||||
|
|
||||||
|
@ -312,7 +318,7 @@ def exec_task(fn, task, d):
|
||||||
logfile = None
|
logfile = None
|
||||||
msg = message
|
msg = message
|
||||||
if not quieterr:
|
if not quieterr:
|
||||||
bb.msg.error(bb.msg.domain.Build, "Task failed: %s" % message )
|
logger.error("Task failed: %s" % message )
|
||||||
failedevent = TaskFailed(msg, logfile, task, d)
|
failedevent = TaskFailed(msg, logfile, task, d)
|
||||||
event.fire(failedevent, d)
|
event.fire(failedevent, d)
|
||||||
return 1
|
return 1
|
||||||
|
@ -320,8 +326,8 @@ def exec_task(fn, task, d):
|
||||||
except Exception:
|
except Exception:
|
||||||
from traceback import format_exc
|
from traceback import format_exc
|
||||||
if not quieterr:
|
if not quieterr:
|
||||||
bb.msg.error(bb.msg.domain.Build, "Build of %s failed" % (task))
|
logger.error("Build of %s failed" % (task))
|
||||||
bb.msg.error(bb.msg.domain.Build, format_exc())
|
logger.error(format_exc())
|
||||||
failedevent = TaskFailed("Task Failed", None, task, d)
|
failedevent = TaskFailed("Task Failed", None, task, d)
|
||||||
event.fire(failedevent, d)
|
event.fire(failedevent, d)
|
||||||
return 1
|
return 1
|
||||||
|
@ -342,7 +348,7 @@ def exec_task(fn, task, d):
|
||||||
se.close()
|
se.close()
|
||||||
|
|
||||||
if logfile and os.path.exists(logfile) and os.path.getsize(logfile) == 0:
|
if logfile and os.path.exists(logfile) and os.path.getsize(logfile) == 0:
|
||||||
bb.msg.debug(2, bb.msg.domain.Build, "Zero size logfile %s, removing" % logfile)
|
logger.debug(2, "Zero size logfile %s, removing", logfile)
|
||||||
os.remove(logfile)
|
os.remove(logfile)
|
||||||
try:
|
try:
|
||||||
os.remove(loglink)
|
os.remove(loglink)
|
||||||
|
|
|
@ -29,14 +29,17 @@
|
||||||
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
import bb.data
|
import bb.data
|
||||||
import bb.utils
|
import bb.utils
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Cache")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import cPickle as pickle
|
import cPickle as pickle
|
||||||
except ImportError:
|
except ImportError:
|
||||||
import pickle
|
import pickle
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
|
logger.info("Importing cPickle failed. Falling back to a very slow implementation.")
|
||||||
|
|
||||||
__cache_version__ = "132"
|
__cache_version__ = "132"
|
||||||
|
|
||||||
|
@ -57,13 +60,13 @@ class Cache:
|
||||||
|
|
||||||
if self.cachedir in [None, '']:
|
if self.cachedir in [None, '']:
|
||||||
self.has_cache = False
|
self.has_cache = False
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
|
logger.info("Not using a cache. Set CACHE = <directory> to enable.")
|
||||||
return
|
return
|
||||||
|
|
||||||
self.has_cache = True
|
self.has_cache = True
|
||||||
self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
|
self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
|
logger.debug(1, "Using cache in '%s'", self.cachedir)
|
||||||
bb.utils.mkdirhier(self.cachedir)
|
bb.utils.mkdirhier(self.cachedir)
|
||||||
|
|
||||||
# If any of configuration.data's dependencies are newer than the
|
# If any of configuration.data's dependencies are newer than the
|
||||||
|
@ -84,14 +87,14 @@ class Cache:
|
||||||
if version_data['BITBAKE_VER'] != bb.__version__:
|
if version_data['BITBAKE_VER'] != bb.__version__:
|
||||||
raise ValueError('Bitbake Version Mismatch')
|
raise ValueError('Bitbake Version Mismatch')
|
||||||
except EOFError:
|
except EOFError:
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
|
logger.info("Truncated cache found, rebuilding...")
|
||||||
self.depends_cache = {}
|
self.depends_cache = {}
|
||||||
except:
|
except:
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
|
logger.info("Invalid cache found, rebuilding...")
|
||||||
self.depends_cache = {}
|
self.depends_cache = {}
|
||||||
else:
|
else:
|
||||||
if os.path.isfile(self.cachefile):
|
if os.path.isfile(self.cachefile):
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
|
logger.info("Out of date cache found, rebuilding...")
|
||||||
|
|
||||||
def getVar(self, var, fn, exp = 0):
|
def getVar(self, var, fn, exp = 0):
|
||||||
"""
|
"""
|
||||||
|
@ -111,7 +114,7 @@ class Cache:
|
||||||
if fn != self.data_fn:
|
if fn != self.data_fn:
|
||||||
# We're trying to access data in the cache which doesn't exist
|
# We're trying to access data in the cache which doesn't exist
|
||||||
# yet setData hasn't been called to setup the right access. Very bad.
|
# yet setData hasn't been called to setup the right access. Very bad.
|
||||||
bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
|
logger.error("data_fn %s and fn %s don't match", self.data_fn, fn)
|
||||||
|
|
||||||
self.cacheclean = False
|
self.cacheclean = False
|
||||||
result = bb.data.getVar(var, self.data, exp)
|
result = bb.data.getVar(var, self.data, exp)
|
||||||
|
@ -152,7 +155,6 @@ class Cache:
|
||||||
if virtualfn.startswith('virtual:'):
|
if virtualfn.startswith('virtual:'):
|
||||||
cls = virtualfn.split(':', 2)[1]
|
cls = virtualfn.split(':', 2)[1]
|
||||||
fn = virtualfn.replace('virtual:' + cls + ':', '')
|
fn = virtualfn.replace('virtual:' + cls + ':', '')
|
||||||
#bb.msg.debug(2, bb.msg.domain.Cache, "virtualfn2realfn %s to %s %s" % (virtualfn, fn, cls))
|
|
||||||
return (fn, cls)
|
return (fn, cls)
|
||||||
|
|
||||||
def realfn2virtual(self, realfn, cls):
|
def realfn2virtual(self, realfn, cls):
|
||||||
|
@ -160,9 +162,7 @@ class Cache:
|
||||||
Convert a real filename + the associated subclass keyword to a virtual filename
|
Convert a real filename + the associated subclass keyword to a virtual filename
|
||||||
"""
|
"""
|
||||||
if cls == "":
|
if cls == "":
|
||||||
#bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and '%s' to %s" % (realfn, cls, realfn))
|
|
||||||
return realfn
|
return realfn
|
||||||
#bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and %s to %s" % (realfn, cls, "virtual:" + cls + ":" + realfn))
|
|
||||||
return "virtual:" + cls + ":" + realfn
|
return "virtual:" + cls + ":" + realfn
|
||||||
|
|
||||||
def loadDataFull(self, virtualfn, appends, cfgData):
|
def loadDataFull(self, virtualfn, appends, cfgData):
|
||||||
|
@ -173,7 +173,7 @@ class Cache:
|
||||||
|
|
||||||
(fn, cls) = self.virtualfn2realfn(virtualfn)
|
(fn, cls) = self.virtualfn2realfn(virtualfn)
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)
|
logger.debug(1, "Parsing %s (full)", fn)
|
||||||
|
|
||||||
bb_data = self.load_bbfile(fn, appends, cfgData)
|
bb_data = self.load_bbfile(fn, appends, cfgData)
|
||||||
return bb_data[cls]
|
return bb_data[cls]
|
||||||
|
@ -198,13 +198,13 @@ class Cache:
|
||||||
virtualfn = self.realfn2virtual(fn, cls)
|
virtualfn = self.realfn2virtual(fn, cls)
|
||||||
if self.depends_cache[virtualfn]["__SKIPPED"]:
|
if self.depends_cache[virtualfn]["__SKIPPED"]:
|
||||||
skipped += 1
|
skipped += 1
|
||||||
bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
|
logger.debug(1, "Skipping %s", virtualfn)
|
||||||
continue
|
continue
|
||||||
self.handle_data(virtualfn, cacheData)
|
self.handle_data(virtualfn, cacheData)
|
||||||
virtuals += 1
|
virtuals += 1
|
||||||
return True, skipped, virtuals
|
return True, skipped, virtuals
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)
|
logger.debug(1, "Parsing %s", fn)
|
||||||
|
|
||||||
bb_data = self.load_bbfile(fn, appends, cfgData)
|
bb_data = self.load_bbfile(fn, appends, cfgData)
|
||||||
|
|
||||||
|
@ -213,7 +213,7 @@ class Cache:
|
||||||
self.setData(virtualfn, fn, bb_data[data])
|
self.setData(virtualfn, fn, bb_data[data])
|
||||||
if self.getVar("__SKIPPED", virtualfn):
|
if self.getVar("__SKIPPED", virtualfn):
|
||||||
skipped += 1
|
skipped += 1
|
||||||
bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
|
logger.debug(1, "Skipping %s", virtualfn)
|
||||||
else:
|
else:
|
||||||
self.handle_data(virtualfn, cacheData)
|
self.handle_data(virtualfn, cacheData)
|
||||||
virtuals += 1
|
virtuals += 1
|
||||||
|
@ -248,7 +248,7 @@ class Cache:
|
||||||
|
|
||||||
# File isn't in depends_cache
|
# File isn't in depends_cache
|
||||||
if not fn in self.depends_cache:
|
if not fn in self.depends_cache:
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
|
logger.debug(2, "Cache: %s is not cached", fn)
|
||||||
self.remove(fn)
|
self.remove(fn)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -256,13 +256,13 @@ class Cache:
|
||||||
|
|
||||||
# Check file still exists
|
# Check file still exists
|
||||||
if mtime == 0:
|
if mtime == 0:
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s no longer exists" % fn)
|
logger.debug(2, "Cache: %s no longer exists", fn)
|
||||||
self.remove(fn)
|
self.remove(fn)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# Check the file's timestamp
|
# Check the file's timestamp
|
||||||
if mtime != self.getVar("CACHETIMESTAMP", fn, True):
|
if mtime != self.getVar("CACHETIMESTAMP", fn, True):
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
|
logger.debug(2, "Cache: %s changed", fn)
|
||||||
self.remove(fn)
|
self.remove(fn)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -277,11 +277,10 @@ class Cache:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if (fmtime != old_mtime):
|
if (fmtime != old_mtime):
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
|
logger.debug(2, "Cache: %s's dependency %s changed", fn, f)
|
||||||
self.remove(fn)
|
self.remove(fn)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
#bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
|
|
||||||
if not fn in self.clean:
|
if not fn in self.clean:
|
||||||
self.clean[fn] = ""
|
self.clean[fn] = ""
|
||||||
|
|
||||||
|
@ -292,16 +291,16 @@ class Cache:
|
||||||
virtualfn = self.realfn2virtual(fn, cls)
|
virtualfn = self.realfn2virtual(fn, cls)
|
||||||
self.clean[virtualfn] = ""
|
self.clean[virtualfn] = ""
|
||||||
if not virtualfn in self.depends_cache:
|
if not virtualfn in self.depends_cache:
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % virtualfn)
|
logger.debug(2, "Cache: %s is not cached", virtualfn)
|
||||||
invalid = True
|
invalid = True
|
||||||
|
|
||||||
# If any one of the variants is not present, mark cache as invalid for all
|
# If any one of the variants is not present, mark cache as invalid for all
|
||||||
if invalid:
|
if invalid:
|
||||||
for cls in (multi or "").split():
|
for cls in (multi or "").split():
|
||||||
virtualfn = self.realfn2virtual(fn, cls)
|
virtualfn = self.realfn2virtual(fn, cls)
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % virtualfn)
|
logger.debug(2, "Cache: Removing %s from cache", virtualfn)
|
||||||
del self.clean[virtualfn]
|
del self.clean[virtualfn]
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Cache: Removing %s from cache" % fn)
|
logger.debug(2, "Cache: removing %s from cache", fn)
|
||||||
del self.clean[fn]
|
del self.clean[fn]
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -312,7 +311,7 @@ class Cache:
|
||||||
Remove a fn from the cache
|
Remove a fn from the cache
|
||||||
Called from the parser in error cases
|
Called from the parser in error cases
|
||||||
"""
|
"""
|
||||||
bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
|
logger.debug(1, "Removing %s from cache", fn)
|
||||||
if fn in self.depends_cache:
|
if fn in self.depends_cache:
|
||||||
del self.depends_cache[fn]
|
del self.depends_cache[fn]
|
||||||
if fn in self.clean:
|
if fn in self.clean:
|
||||||
|
@ -329,7 +328,7 @@ class Cache:
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.cacheclean:
|
if self.cacheclean:
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
|
logger.info("Cache is clean, not saving.")
|
||||||
return
|
return
|
||||||
|
|
||||||
version_data = {}
|
version_data = {}
|
||||||
|
@ -339,10 +338,10 @@ class Cache:
|
||||||
cache_data = copy.copy(self.depends_cache)
|
cache_data = copy.copy(self.depends_cache)
|
||||||
for fn in self.depends_cache:
|
for fn in self.depends_cache:
|
||||||
if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
|
if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
|
||||||
bb.msg.debug(2, bb.msg.domain.Cache, "Not caching %s, marked as not cacheable" % fn)
|
logger.debug(2, "Not caching %s, marked as not cacheable", fn)
|
||||||
del cache_data[fn]
|
del cache_data[fn]
|
||||||
elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
|
elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
|
||||||
bb.msg.error(bb.msg.domain.Cache, "Not caching %s as it had SRCREVINACTION in PV. Please report this bug" % fn)
|
logger.error("Not caching %s as it had SRCREVINACTION in PV. Please report this bug", fn)
|
||||||
del cache_data[fn]
|
del cache_data[fn]
|
||||||
|
|
||||||
p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
|
p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
|
||||||
|
@ -360,7 +359,7 @@ class Cache:
|
||||||
pe = self.getVar('PE', file_name, True) or "0"
|
pe = self.getVar('PE', file_name, True) or "0"
|
||||||
pv = self.getVar('PV', file_name, True)
|
pv = self.getVar('PV', file_name, True)
|
||||||
if 'SRCREVINACTION' in pv:
|
if 'SRCREVINACTION' in pv:
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name))
|
logger.info("Found SRCREVINACTION in PV (%s) or %s. Please report this bug.", pv, file_name)
|
||||||
pr = self.getVar('PR', file_name, True)
|
pr = self.getVar('PR', file_name, True)
|
||||||
dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
|
dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
|
||||||
depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
|
depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
|
||||||
|
|
|
@ -23,12 +23,19 @@
|
||||||
|
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
import sys, os, glob, os.path, re, time
|
import sys, os, glob, os.path, re, time
|
||||||
|
import logging
|
||||||
import sre_constants
|
import sre_constants
|
||||||
from cStringIO import StringIO
|
from cStringIO import StringIO
|
||||||
from contextlib import closing
|
from contextlib import closing
|
||||||
import bb
|
import bb
|
||||||
from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
|
from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake")
|
||||||
|
collectlog = logging.getLogger("BitBake.Collection")
|
||||||
|
buildlog = logging.getLogger("BitBake.Build")
|
||||||
|
parselog = logging.getLogger("BitBake.Parsing")
|
||||||
|
providerlog = logging.getLogger("BitBake.Provider")
|
||||||
|
|
||||||
class MultipleMatches(Exception):
|
class MultipleMatches(Exception):
|
||||||
"""
|
"""
|
||||||
Exception raised when multiple file matches are found
|
Exception raised when multiple file matches are found
|
||||||
|
@ -102,7 +109,7 @@ class BBCooker:
|
||||||
import termios
|
import termios
|
||||||
tcattr = termios.tcgetattr(fd)
|
tcattr = termios.tcgetattr(fd)
|
||||||
if tcattr[3] & termios.TOSTOP:
|
if tcattr[3] & termios.TOSTOP:
|
||||||
bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...")
|
buildlog.info("The terminal had the TOSTOP bit set, clearing...")
|
||||||
tcattr[3] = tcattr[3] & ~termios.TOSTOP
|
tcattr[3] = tcattr[3] & ~termios.TOSTOP
|
||||||
termios.tcsetattr(fd, termios.TCSANOW, tcattr)
|
termios.tcsetattr(fd, termios.TCSANOW, tcattr)
|
||||||
|
|
||||||
|
@ -118,7 +125,7 @@ class BBCooker:
|
||||||
if nice:
|
if nice:
|
||||||
curnice = os.nice(0)
|
curnice = os.nice(0)
|
||||||
nice = int(nice) - curnice
|
nice = int(nice) - curnice
|
||||||
bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice))
|
buildlog.verbose("Renice to %s " % os.nice(nice))
|
||||||
|
|
||||||
def parseCommandLine(self):
|
def parseCommandLine(self):
|
||||||
# Parse any commandline into actions
|
# Parse any commandline into actions
|
||||||
|
@ -126,11 +133,11 @@ class BBCooker:
|
||||||
self.commandlineAction = None
|
self.commandlineAction = None
|
||||||
|
|
||||||
if 'world' in self.configuration.pkgs_to_build:
|
if 'world' in self.configuration.pkgs_to_build:
|
||||||
bb.msg.error(bb.msg.domain.Build, "'world' is not a valid target for --environment.")
|
buildlog.error("'world' is not a valid target for --environment.")
|
||||||
elif len(self.configuration.pkgs_to_build) > 1:
|
elif len(self.configuration.pkgs_to_build) > 1:
|
||||||
bb.msg.error(bb.msg.domain.Build, "Only one target can be used with the --environment option.")
|
buildlog.error("Only one target can be used with the --environment option.")
|
||||||
elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
|
elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
|
||||||
bb.msg.error(bb.msg.domain.Build, "No target should be used with the --environment and --buildfile options.")
|
buildlog.error("No target should be used with the --environment and --buildfile options.")
|
||||||
elif len(self.configuration.pkgs_to_build) > 0:
|
elif len(self.configuration.pkgs_to_build) > 0:
|
||||||
self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
|
self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
|
||||||
else:
|
else:
|
||||||
|
@ -148,13 +155,13 @@ class BBCooker:
|
||||||
self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
|
self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
|
||||||
else:
|
else:
|
||||||
self.commandlineAction = None
|
self.commandlineAction = None
|
||||||
bb.msg.error(bb.msg.domain.Build, "Please specify a package name for dependency graph generation.")
|
buildlog.error("Please specify a package name for dependency graph generation.")
|
||||||
else:
|
else:
|
||||||
if self.configuration.pkgs_to_build:
|
if self.configuration.pkgs_to_build:
|
||||||
self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
|
self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
|
||||||
else:
|
else:
|
||||||
self.commandlineAction = None
|
self.commandlineAction = None
|
||||||
bb.msg.error(bb.msg.domain.Build, "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
|
buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
|
||||||
|
|
||||||
def runCommands(self, server, data, abort):
|
def runCommands(self, server, data, abort):
|
||||||
"""
|
"""
|
||||||
|
@ -180,8 +187,8 @@ class BBCooker:
|
||||||
preferred_versions[pn] = (pref_ver, pref_file)
|
preferred_versions[pn] = (pref_ver, pref_file)
|
||||||
latest_versions[pn] = (last_ver, last_file)
|
latest_versions[pn] = (last_ver, last_file)
|
||||||
|
|
||||||
bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version"))
|
logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
|
||||||
bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "================="))
|
logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")
|
||||||
|
|
||||||
for p in sorted(pkg_pn):
|
for p in sorted(pkg_pn):
|
||||||
pref = preferred_versions[p]
|
pref = preferred_versions[p]
|
||||||
|
@ -193,7 +200,7 @@ class BBCooker:
|
||||||
if pref == latest:
|
if pref == latest:
|
||||||
prefstr = ""
|
prefstr = ""
|
||||||
|
|
||||||
bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr))
|
logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
|
||||||
|
|
||||||
def compareRevisions(self):
|
def compareRevisions(self):
|
||||||
ret = bb.fetch.fetcher_compare_revisons(self.configuration.data)
|
ret = bb.fetch.fetcher_compare_revisons(self.configuration.data)
|
||||||
|
@ -230,27 +237,21 @@ class BBCooker:
|
||||||
if fn:
|
if fn:
|
||||||
try:
|
try:
|
||||||
envdata = self.bb_cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
|
envdata = self.bb_cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
|
||||||
except IOError as e:
|
except Exception, e:
|
||||||
bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
|
parselog.exception("Unable to read %s", fn)
|
||||||
raise
|
|
||||||
except Exception as e:
|
|
||||||
bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
|
|
||||||
raise
|
raise
|
||||||
|
|
||||||
# emit variables and shell functions
|
# emit variables and shell functions
|
||||||
try:
|
|
||||||
data.update_data(envdata)
|
data.update_data(envdata)
|
||||||
with closing(StringIO()) as env:
|
with closing(StringIO()) as env:
|
||||||
data.emit_env(env, envdata, True)
|
data.emit_env(env, envdata, True)
|
||||||
bb.msg.plain(env.getvalue())
|
logger.plain(env.getvalue())
|
||||||
except Exception as e:
|
|
||||||
bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
|
|
||||||
|
|
||||||
# emit the metadata which isnt valid shell
|
# emit the metadata which isnt valid shell
|
||||||
data.expandKeys(envdata)
|
data.expandKeys(envdata)
|
||||||
for e in envdata.keys():
|
for e in envdata.keys():
|
||||||
if data.getVarFlag( e, 'python', envdata ):
|
if data.getVarFlag( e, 'python', envdata ):
|
||||||
bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))
|
logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
|
||||||
|
|
||||||
def generateDepTreeData(self, pkgs_to_build, task):
|
def generateDepTreeData(self, pkgs_to_build, task):
|
||||||
"""
|
"""
|
||||||
|
@ -374,7 +375,7 @@ class BBCooker:
|
||||||
for rdepend in depgraph["rdepends-pn"][pn]:
|
for rdepend in depgraph["rdepends-pn"][pn]:
|
||||||
print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
|
print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
|
||||||
print("}", file=depends_file)
|
print("}", file=depends_file)
|
||||||
bb.msg.plain("PN dependencies saved to 'pn-depends.dot'")
|
logger.info("PN dependencies saved to 'pn-depends.dot'")
|
||||||
|
|
||||||
depends_file = file('package-depends.dot', 'w' )
|
depends_file = file('package-depends.dot', 'w' )
|
||||||
print("digraph depends {", file=depends_file)
|
print("digraph depends {", file=depends_file)
|
||||||
|
@ -395,7 +396,7 @@ class BBCooker:
|
||||||
for rdepend in depgraph["rrecs-pkg"][package]:
|
for rdepend in depgraph["rrecs-pkg"][package]:
|
||||||
print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
|
print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
|
||||||
print("}", file=depends_file)
|
print("}", file=depends_file)
|
||||||
bb.msg.plain("Package dependencies saved to 'package-depends.dot'")
|
logger.info("Package dependencies saved to 'package-depends.dot'")
|
||||||
|
|
||||||
tdepends_file = file('task-depends.dot', 'w' )
|
tdepends_file = file('task-depends.dot', 'w' )
|
||||||
print("digraph depends {", file=tdepends_file)
|
print("digraph depends {", file=tdepends_file)
|
||||||
|
@ -407,7 +408,7 @@ class BBCooker:
|
||||||
for dep in depgraph["tdepends"][task]:
|
for dep in depgraph["tdepends"][task]:
|
||||||
print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
|
print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
|
||||||
print("}", file=tdepends_file)
|
print("}", file=tdepends_file)
|
||||||
bb.msg.plain("Task dependencies saved to 'task-depends.dot'")
|
logger.info("Task dependencies saved to 'task-depends.dot'")
|
||||||
|
|
||||||
def buildDepgraph( self ):
|
def buildDepgraph( self ):
|
||||||
all_depends = self.status.all_depends
|
all_depends = self.status.all_depends
|
||||||
|
@ -431,10 +432,10 @@ class BBCooker:
|
||||||
try:
|
try:
|
||||||
(providee, provider) = p.split(':')
|
(providee, provider) = p.split(':')
|
||||||
except:
|
except:
|
||||||
bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
|
providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
|
||||||
continue
|
continue
|
||||||
if providee in self.status.preferred and self.status.preferred[providee] != provider:
|
if providee in self.status.preferred and self.status.preferred[providee] != provider:
|
||||||
bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee]))
|
providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee])
|
||||||
self.status.preferred[providee] = provider
|
self.status.preferred[providee] = provider
|
||||||
|
|
||||||
# Calculate priorities for each file
|
# Calculate priorities for each file
|
||||||
|
@ -443,8 +444,7 @@ class BBCooker:
|
||||||
|
|
||||||
for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
|
for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
|
||||||
if not regex in matched:
|
if not regex in matched:
|
||||||
bb.msg.warn(bb.msg.domain.Provider, "No bb files matched BBFILE_PATTERN_%s '%s'" %
|
collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
|
||||||
(collection, pattern))
|
|
||||||
|
|
||||||
def buildWorldTargetList(self):
|
def buildWorldTargetList(self):
|
||||||
"""
|
"""
|
||||||
|
@ -452,19 +452,19 @@ class BBCooker:
|
||||||
"""
|
"""
|
||||||
all_depends = self.status.all_depends
|
all_depends = self.status.all_depends
|
||||||
pn_provides = self.status.pn_provides
|
pn_provides = self.status.pn_provides
|
||||||
bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"")
|
parselog.debug(1, "collating packages for \"world\"")
|
||||||
for f in self.status.possible_world:
|
for f in self.status.possible_world:
|
||||||
terminal = True
|
terminal = True
|
||||||
pn = self.status.pkg_fn[f]
|
pn = self.status.pkg_fn[f]
|
||||||
|
|
||||||
for p in pn_provides[pn]:
|
for p in pn_provides[pn]:
|
||||||
if p.startswith('virtual/'):
|
if p.startswith('virtual/'):
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p))
|
parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
|
||||||
terminal = False
|
terminal = False
|
||||||
break
|
break
|
||||||
for pf in self.status.providers[p]:
|
for pf in self.status.providers[p]:
|
||||||
if self.status.pkg_fn[pf] != pn:
|
if self.status.pkg_fn[pf] != pn:
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p))
|
parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
|
||||||
terminal = False
|
terminal = False
|
||||||
break
|
break
|
||||||
if terminal:
|
if terminal:
|
||||||
|
@ -478,8 +478,9 @@ class BBCooker:
|
||||||
"""Drop off into a shell"""
|
"""Drop off into a shell"""
|
||||||
try:
|
try:
|
||||||
from bb import shell
|
from bb import shell
|
||||||
except ImportError as details:
|
except ImportError:
|
||||||
bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details )
|
parselog.exception("Interactive mode not available")
|
||||||
|
sys.exit(1)
|
||||||
else:
|
else:
|
||||||
shell.start( self )
|
shell.start( self )
|
||||||
|
|
||||||
|
@ -502,14 +503,14 @@ class BBCooker:
|
||||||
|
|
||||||
layerconf = self._findLayerConf()
|
layerconf = self._findLayerConf()
|
||||||
if layerconf:
|
if layerconf:
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "Found bblayers.conf (%s)" % layerconf)
|
parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
|
||||||
data = bb.parse.handle(layerconf, data)
|
data = bb.parse.handle(layerconf, data)
|
||||||
|
|
||||||
layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
|
layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
|
||||||
|
|
||||||
data = bb.data.createCopy(data)
|
data = bb.data.createCopy(data)
|
||||||
for layer in layers:
|
for layer in layers:
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "Adding layer %s" % layer)
|
parselog.debug(2, "Adding layer %s", layer)
|
||||||
bb.data.setVar('LAYERDIR', layer, data)
|
bb.data.setVar('LAYERDIR', layer, data)
|
||||||
data = bb.parse.handle(os.path.join(layer, "conf", "layer.conf"), data)
|
data = bb.parse.handle(os.path.join(layer, "conf", "layer.conf"), data)
|
||||||
|
|
||||||
|
@ -529,7 +530,7 @@ class BBCooker:
|
||||||
bb.data.delVar('LAYERDIR', data)
|
bb.data.delVar('LAYERDIR', data)
|
||||||
|
|
||||||
if not data.getVar("BBPATH", True):
|
if not data.getVar("BBPATH", True):
|
||||||
bb.fatal("The BBPATH variable is not set")
|
raise SystemExit("The BBPATH variable is not set")
|
||||||
|
|
||||||
data = bb.parse.handle(os.path.join("conf", "bitbake.conf"), data)
|
data = bb.parse.handle(os.path.join("conf", "bitbake.conf"), data)
|
||||||
|
|
||||||
|
@ -553,10 +554,9 @@ class BBCooker:
|
||||||
|
|
||||||
bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
|
bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
|
||||||
|
|
||||||
except IOError as e:
|
except (IOError, bb.parse.ParseError):
|
||||||
bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (files, str(e)))
|
parselog.exception("Error when parsing %s", files)
|
||||||
except bb.parse.ParseError as details:
|
sys.exit(1)
|
||||||
bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (files, details) )
|
|
||||||
|
|
||||||
def handleCollections( self, collections ):
|
def handleCollections( self, collections ):
|
||||||
"""Handle collections"""
|
"""Handle collections"""
|
||||||
|
@ -565,22 +565,22 @@ class BBCooker:
|
||||||
for c in collection_list:
|
for c in collection_list:
|
||||||
regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
|
regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
|
||||||
if regex == None:
|
if regex == None:
|
||||||
bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c)
|
parselog.error("BBFILE_PATTERN_%s not defined" % c)
|
||||||
continue
|
continue
|
||||||
priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
|
priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
|
||||||
if priority == None:
|
if priority == None:
|
||||||
bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c)
|
parselog.error("BBFILE_PRIORITY_%s not defined" % c)
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
cre = re.compile(regex)
|
cre = re.compile(regex)
|
||||||
except re.error:
|
except re.error:
|
||||||
bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex))
|
parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
pri = int(priority)
|
pri = int(priority)
|
||||||
self.status.bbfile_config_priorities.append((c, regex, cre, pri))
|
self.status.bbfile_config_priorities.append((c, regex, cre, pri))
|
||||||
except ValueError:
|
except ValueError:
|
||||||
bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority))
|
parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
|
||||||
|
|
||||||
def buildSetVars(self):
|
def buildSetVars(self):
|
||||||
"""
|
"""
|
||||||
|
@ -616,9 +616,9 @@ class BBCooker:
|
||||||
"""
|
"""
|
||||||
matches = self.matchFiles(buildfile)
|
matches = self.matchFiles(buildfile)
|
||||||
if len(matches) != 1:
|
if len(matches) != 1:
|
||||||
bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches)))
|
parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
|
||||||
for f in matches:
|
for f in matches:
|
||||||
bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
|
parselog.error(" %s" % f)
|
||||||
raise MultipleMatches
|
raise MultipleMatches
|
||||||
return matches[0]
|
return matches[0]
|
||||||
|
|
||||||
|
@ -662,7 +662,7 @@ class BBCooker:
|
||||||
|
|
||||||
# Remove stamp for target if force mode active
|
# Remove stamp for target if force mode active
|
||||||
if self.configuration.force:
|
if self.configuration.force:
|
||||||
bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn))
|
logger.verbose("Remove stamp %s, %s", task, fn)
|
||||||
bb.build.del_stamp('do_%s' % task, self.status, fn)
|
bb.build.del_stamp('do_%s' % task, self.status, fn)
|
||||||
|
|
||||||
# Setup taskdata structure
|
# Setup taskdata structure
|
||||||
|
@ -691,7 +691,7 @@ class BBCooker:
|
||||||
retval = rq.execute_runqueue()
|
retval = rq.execute_runqueue()
|
||||||
except runqueue.TaskFailure as exc:
|
except runqueue.TaskFailure as exc:
|
||||||
for fnid in exc.args:
|
for fnid in exc.args:
|
||||||
bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
|
buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
|
||||||
failures = failures + 1
|
failures = failures + 1
|
||||||
retval = False
|
retval = False
|
||||||
if not retval:
|
if not retval:
|
||||||
|
@ -728,7 +728,7 @@ class BBCooker:
|
||||||
retval = rq.execute_runqueue()
|
retval = rq.execute_runqueue()
|
||||||
except runqueue.TaskFailure as exc:
|
except runqueue.TaskFailure as exc:
|
||||||
for fnid in exc.args:
|
for fnid in exc.args:
|
||||||
bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
|
buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
|
||||||
failures = failures + 1
|
failures = failures + 1
|
||||||
retval = False
|
retval = False
|
||||||
if not retval:
|
if not retval:
|
||||||
|
@ -779,11 +779,11 @@ class BBCooker:
|
||||||
try:
|
try:
|
||||||
import psyco
|
import psyco
|
||||||
except ImportError:
|
except ImportError:
|
||||||
bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
|
collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
|
||||||
else:
|
else:
|
||||||
psyco.bind( CookerParser.parse_next )
|
psyco.bind( CookerParser.parse_next )
|
||||||
else:
|
else:
|
||||||
bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.")
|
collectlog.info("You have disabled Psyco. This decreases performance.")
|
||||||
|
|
||||||
self.status = bb.cache.CacheData()
|
self.status = bb.cache.CacheData()
|
||||||
|
|
||||||
|
@ -802,7 +802,7 @@ class BBCooker:
|
||||||
self.cookerState = cookerParsing
|
self.cookerState = cookerParsing
|
||||||
|
|
||||||
if not self.parser.parse_next():
|
if not self.parser.parse_next():
|
||||||
bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")
|
collectlog.debug(1, "parsing complete")
|
||||||
self.buildDepgraph()
|
self.buildDepgraph()
|
||||||
self.cookerState = cookerParsed
|
self.cookerState = cookerParsed
|
||||||
return None
|
return None
|
||||||
|
@ -850,7 +850,7 @@ class BBCooker:
|
||||||
parsed, cached, skipped, masked = 0, 0, 0, 0
|
parsed, cached, skipped, masked = 0, 0, 0, 0
|
||||||
self.bb_cache = bb.cache.init(self)
|
self.bb_cache = bb.cache.init(self)
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
|
collectlog.debug(1, "collecting .bb files")
|
||||||
|
|
||||||
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
|
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
|
||||||
data.setVar("BBFILES", " ".join(files), self.configuration.data)
|
data.setVar("BBFILES", " ".join(files), self.configuration.data)
|
||||||
|
@ -859,7 +859,7 @@ class BBCooker:
|
||||||
files = self.get_bbfiles()
|
files = self.get_bbfiles()
|
||||||
|
|
||||||
if not len(files):
|
if not len(files):
|
||||||
bb.msg.error(bb.msg.domain.Collection, "no recipe files to build, check your BBPATH and BBFILES?")
|
collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
|
||||||
bb.event.fire(CookerExit(), self.configuration.event_data)
|
bb.event.fire(CookerExit(), self.configuration.event_data)
|
||||||
|
|
||||||
newfiles = set()
|
newfiles = set()
|
||||||
|
@ -879,13 +879,14 @@ class BBCooker:
|
||||||
try:
|
try:
|
||||||
bbmask_compiled = re.compile(bbmask)
|
bbmask_compiled = re.compile(bbmask)
|
||||||
except sre_constants.error:
|
except sre_constants.error:
|
||||||
bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.")
|
collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
|
||||||
|
return list(newfiles), 0
|
||||||
|
|
||||||
bbfiles = []
|
bbfiles = []
|
||||||
bbappend = []
|
bbappend = []
|
||||||
for f in newfiles:
|
for f in newfiles:
|
||||||
if bbmask and bbmask_compiled.search(f):
|
if bbmask and bbmask_compiled.search(f):
|
||||||
bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f)
|
collectlog.debug(1, "skipping masked file %s", f)
|
||||||
masked += 1
|
masked += 1
|
||||||
continue
|
continue
|
||||||
if f.endswith('.bb'):
|
if f.endswith('.bb'):
|
||||||
|
@ -893,7 +894,7 @@ class BBCooker:
|
||||||
elif f.endswith('.bbappend'):
|
elif f.endswith('.bbappend'):
|
||||||
bbappend.append(f)
|
bbappend.append(f)
|
||||||
else:
|
else:
|
||||||
bb.msg.note(1, bb.msg.domain.Collection, "File %s of unknown filetype in BBFILES? Ignorning..." % f)
|
collectlog.debug(1, "skipping %s: unknown file extension", f)
|
||||||
|
|
||||||
# Build a list of .bbappend files for each .bb file
|
# Build a list of .bbappend files for each .bb file
|
||||||
self.appendlist = {}
|
self.appendlist = {}
|
||||||
|
@ -1008,11 +1009,6 @@ class CookerParser:
|
||||||
self.skipped += skipped
|
self.skipped += skipped
|
||||||
self.virtuals += virtuals
|
self.virtuals += virtuals
|
||||||
|
|
||||||
except IOError as e:
|
|
||||||
self.error += 1
|
|
||||||
cooker.bb_cache.remove(f)
|
|
||||||
bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
|
|
||||||
pass
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
cooker.bb_cache.remove(f)
|
cooker.bb_cache.remove(f)
|
||||||
cooker.bb_cache.sync()
|
cooker.bb_cache.sync()
|
||||||
|
@ -1020,7 +1016,7 @@ class CookerParser:
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.error += 1
|
self.error += 1
|
||||||
cooker.bb_cache.remove(f)
|
cooker.bb_cache.remove(f)
|
||||||
bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f))
|
parselog.exception("Unable to open %s", f)
|
||||||
except:
|
except:
|
||||||
cooker.bb_cache.remove(f)
|
cooker.bb_cache.remove(f)
|
||||||
raise
|
raise
|
||||||
|
|
|
@ -30,10 +30,12 @@ BitBake build tools.
|
||||||
|
|
||||||
import copy, re, sys
|
import copy, re, sys
|
||||||
from collections import MutableMapping
|
from collections import MutableMapping
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import utils
|
from bb import utils
|
||||||
from bb.COW import COWDictBase
|
from bb.COW import COWDictBase
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Data")
|
||||||
|
|
||||||
__setvar_keyword__ = ["_append", "_prepend"]
|
__setvar_keyword__ = ["_append", "_prepend"]
|
||||||
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
|
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
|
||||||
|
@ -101,10 +103,8 @@ class DataSmart(MutableMapping):
|
||||||
s = __expand_python_regexp__.sub(varparse.python_sub, s)
|
s = __expand_python_regexp__.sub(varparse.python_sub, s)
|
||||||
if s == olds:
|
if s == olds:
|
||||||
break
|
break
|
||||||
except KeyboardInterrupt:
|
except Exception:
|
||||||
raise
|
logger.exception("Error evaluating '%s'", s)
|
||||||
except:
|
|
||||||
bb.msg.note(1, bb.msg.domain.Data, "%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
|
|
||||||
raise
|
raise
|
||||||
|
|
||||||
varparse.value = s
|
varparse.value = s
|
||||||
|
@ -152,7 +152,7 @@ class DataSmart(MutableMapping):
|
||||||
try:
|
try:
|
||||||
self[name] = self[var]
|
self[name] = self[var]
|
||||||
except Exception:
|
except Exception:
|
||||||
bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar")
|
logger.info("Untracked delVar")
|
||||||
|
|
||||||
# now on to the appends and prepends
|
# now on to the appends and prepends
|
||||||
for op in __setvar_keyword__:
|
for op in __setvar_keyword__:
|
||||||
|
|
|
@ -27,10 +27,13 @@ BitBake build tools.
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
import os, re
|
import os, re
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb import persist_data
|
from bb import persist_data
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Fetch")
|
||||||
|
|
||||||
class MalformedUrl(Exception):
|
class MalformedUrl(Exception):
|
||||||
"""Exception raised when encountering an invalid url"""
|
"""Exception raised when encountering an invalid url"""
|
||||||
|
|
||||||
|
@ -117,9 +120,8 @@ def encodeurl(decoded):
|
||||||
return url
|
return url
|
||||||
|
|
||||||
def uri_replace(uri, uri_find, uri_replace, d):
|
def uri_replace(uri, uri_find, uri_replace, d):
|
||||||
# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
|
|
||||||
if not uri or not uri_find or not uri_replace:
|
if not uri or not uri_find or not uri_replace:
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
|
logger.debug(1, "uri_replace: passed an undefined value, not replacing")
|
||||||
uri_decoded = list(decodeurl(uri))
|
uri_decoded = list(decodeurl(uri))
|
||||||
uri_find_decoded = list(decodeurl(uri_find))
|
uri_find_decoded = list(decodeurl(uri_find))
|
||||||
uri_replace_decoded = list(decodeurl(uri_replace))
|
uri_replace_decoded = list(decodeurl(uri_replace))
|
||||||
|
@ -135,13 +137,8 @@ def uri_replace(uri, uri_find, uri_replace, d):
|
||||||
localfn = bb.fetch.localpath(uri, d)
|
localfn = bb.fetch.localpath(uri, d)
|
||||||
if localfn:
|
if localfn:
|
||||||
result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
|
result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
|
||||||
# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
|
|
||||||
else:
|
else:
|
||||||
# bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
|
|
||||||
return uri
|
return uri
|
||||||
# else:
|
|
||||||
# for j in i:
|
|
||||||
# FIXME: apply replacements against options
|
|
||||||
return encodeurl(result_decoded)
|
return encodeurl(result_decoded)
|
||||||
|
|
||||||
methods = []
|
methods = []
|
||||||
|
@ -158,9 +155,9 @@ def fetcher_init(d):
|
||||||
# When to drop SCM head revisions controlled by user policy
|
# When to drop SCM head revisions controlled by user policy
|
||||||
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
|
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
|
||||||
if srcrev_policy == "cache":
|
if srcrev_policy == "cache":
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
|
logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
|
||||||
elif srcrev_policy == "clear":
|
elif srcrev_policy == "clear":
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
|
logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
|
||||||
try:
|
try:
|
||||||
bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
|
bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
|
||||||
except:
|
except:
|
||||||
|
@ -190,11 +187,11 @@ def fetcher_compare_revisons(d):
|
||||||
changed = False
|
changed = False
|
||||||
for key in data:
|
for key in data:
|
||||||
if key not in data2 or data2[key] != data[key]:
|
if key not in data2 or data2[key] != data[key]:
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key)
|
logger.debug(1, "%s changed", key)
|
||||||
changed = True
|
changed = True
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key)
|
logger.debug(2, "%s did not change", key)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# Function call order is usually:
|
# Function call order is usually:
|
||||||
|
@ -334,7 +331,7 @@ def checkstatus(d, urls = None):
|
||||||
for u in urls:
|
for u in urls:
|
||||||
ud = urldata[u]
|
ud = urldata[u]
|
||||||
m = ud.method
|
m = ud.method
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Testing URL %s" % u)
|
logger.debug(1, "Testing URL %s" % u)
|
||||||
# First try checking uri, u, from PREMIRRORS
|
# First try checking uri, u, from PREMIRRORS
|
||||||
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
|
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
|
||||||
ret = try_mirrors(d, u, mirrors, True)
|
ret = try_mirrors(d, u, mirrors, True)
|
||||||
|
@ -398,7 +395,7 @@ def get_srcrev(d):
|
||||||
scms.append(u)
|
scms.append(u)
|
||||||
|
|
||||||
if len(scms) == 0:
|
if len(scms) == 0:
|
||||||
bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
|
logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
|
||||||
raise ParameterError
|
raise ParameterError
|
||||||
|
|
||||||
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
|
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
|
||||||
|
@ -412,7 +409,7 @@ def get_srcrev(d):
|
||||||
#
|
#
|
||||||
format = bb.data.getVar('SRCREV_FORMAT', d, 1)
|
format = bb.data.getVar('SRCREV_FORMAT', d, 1)
|
||||||
if not format:
|
if not format:
|
||||||
bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
|
logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
|
||||||
raise ParameterError
|
raise ParameterError
|
||||||
|
|
||||||
for scm in scms:
|
for scm in scms:
|
||||||
|
@ -454,7 +451,7 @@ def runfetchcmd(cmd, d, quiet = False):
|
||||||
if val:
|
if val:
|
||||||
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
|
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)
|
logger.debug(1, "Running %s", cmd)
|
||||||
|
|
||||||
# redirect stderr to stdout
|
# redirect stderr to stdout
|
||||||
stdout_handle = os.popen(cmd + " 2>&1", "r")
|
stdout_handle = os.popen(cmd + " 2>&1", "r")
|
||||||
|
@ -490,7 +487,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
|
||||||
"""
|
"""
|
||||||
fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
|
fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
|
||||||
if not check and os.access(fpath, os.R_OK) and not force:
|
if not check and os.access(fpath, os.R_OK) and not force:
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % fpath)
|
logger.debug(1, "%s already exists, skipping checkout." % fpath)
|
||||||
return fpath
|
return fpath
|
||||||
|
|
||||||
ld = d.createCopy()
|
ld = d.createCopy()
|
||||||
|
@ -500,7 +497,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
|
||||||
try:
|
try:
|
||||||
ud = FetchData(newuri, ld)
|
ud = FetchData(newuri, ld)
|
||||||
except bb.fetch.NoMethodError:
|
except bb.fetch.NoMethodError:
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % uri)
|
logger.debug(1, "No method for %s", uri)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
ud.setup_localpath(ld)
|
ud.setup_localpath(ld)
|
||||||
|
@ -518,7 +515,7 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
|
||||||
bb.fetch.MD5SumError):
|
bb.fetch.MD5SumError):
|
||||||
import sys
|
import sys
|
||||||
(type, value, traceback) = sys.exc_info()
|
(type, value, traceback) = sys.exc_info()
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Mirror fetch failure: %s" % value)
|
logger.debug(2, "Mirror fetch failure: %s" % value)
|
||||||
removefile(ud.localpath)
|
removefile(ud.localpath)
|
||||||
continue
|
continue
|
||||||
return None
|
return None
|
||||||
|
@ -654,7 +651,7 @@ class Fetch(object):
|
||||||
Check the status of a URL
|
Check the status of a URL
|
||||||
Assumes localpath was called first
|
Assumes localpath was called first
|
||||||
"""
|
"""
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
|
logger.info("URL %s could not be checked for status since no method exists.", url)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def getSRCDate(urldata, d):
|
def getSRCDate(urldata, d):
|
||||||
|
|
|
@ -25,11 +25,10 @@ BitBake 'Fetch' implementation for bzr.
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch, FetchError, runfetchcmd, logger
|
||||||
from bb.fetch import FetchError
|
|
||||||
from bb.fetch import runfetchcmd
|
|
||||||
|
|
||||||
class Bzr(Fetch):
|
class Bzr(Fetch):
|
||||||
def supports(self, url, ud, d):
|
def supports(self, url, ud, d):
|
||||||
|
@ -93,16 +92,16 @@ class Bzr(Fetch):
|
||||||
|
|
||||||
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
|
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
|
||||||
bzrcmd = self._buildbzrcommand(ud, d, "update")
|
bzrcmd = self._buildbzrcommand(ud, d, "update")
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc)
|
logger.debug(1, "BZR Update %s", loc)
|
||||||
os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
|
os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
|
||||||
runfetchcmd(bzrcmd, d)
|
runfetchcmd(bzrcmd, d)
|
||||||
else:
|
else:
|
||||||
os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)))
|
os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)))
|
||||||
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
|
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc)
|
logger.debug(1, "BZR Checkout %s", loc)
|
||||||
bb.mkdirhier(ud.pkgdir)
|
bb.mkdirhier(ud.pkgdir)
|
||||||
os.chdir(ud.pkgdir)
|
os.chdir(ud.pkgdir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd)
|
logger.debug(1, "Running %s", bzrcmd)
|
||||||
runfetchcmd(bzrcmd, d)
|
runfetchcmd(bzrcmd, d)
|
||||||
|
|
||||||
os.chdir(ud.pkgdir)
|
os.chdir(ud.pkgdir)
|
||||||
|
@ -130,7 +129,7 @@ class Bzr(Fetch):
|
||||||
"""
|
"""
|
||||||
Return the latest upstream revision number
|
Return the latest upstream revision number
|
||||||
"""
|
"""
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "BZR fetcher hitting network for %s" % url)
|
logger.debug(2, "BZR fetcher hitting network for %s", url)
|
||||||
|
|
||||||
output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
|
output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
|
||||||
|
|
||||||
|
|
|
@ -27,11 +27,10 @@ BitBake build tools.
|
||||||
#
|
#
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch, FetchError, MissingParameterError, logger
|
||||||
from bb.fetch import FetchError
|
|
||||||
from bb.fetch import MissingParameterError
|
|
||||||
|
|
||||||
class Cvs(Fetch):
|
class Cvs(Fetch):
|
||||||
"""
|
"""
|
||||||
|
@ -136,21 +135,21 @@ class Cvs(Fetch):
|
||||||
cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
|
cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
|
||||||
|
|
||||||
# create module directory
|
# create module directory
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
|
logger.debug(2, "Fetch: checking for module directory")
|
||||||
pkg = data.expand('${PN}', d)
|
pkg = data.expand('${PN}', d)
|
||||||
pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
|
pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
|
||||||
moddir = os.path.join(pkgdir, localdir)
|
moddir = os.path.join(pkgdir, localdir)
|
||||||
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
|
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
|
logger.info("Update " + loc)
|
||||||
# update sources there
|
# update sources there
|
||||||
os.chdir(moddir)
|
os.chdir(moddir)
|
||||||
myret = os.system(cvsupdatecmd)
|
myret = os.system(cvsupdatecmd)
|
||||||
else:
|
else:
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
logger.info("Fetch " + loc)
|
||||||
# check out sources there
|
# check out sources there
|
||||||
bb.mkdirhier(pkgdir)
|
bb.mkdirhier(pkgdir)
|
||||||
os.chdir(pkgdir)
|
os.chdir(pkgdir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
|
logger.debug(1, "Running %s", cvscmd)
|
||||||
myret = os.system(cvscmd)
|
myret = os.system(cvscmd)
|
||||||
|
|
||||||
if myret != 0 or not os.access(moddir, os.R_OK):
|
if myret != 0 or not os.access(moddir, os.R_OK):
|
||||||
|
|
|
@ -25,6 +25,7 @@ import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch
|
||||||
from bb.fetch import runfetchcmd
|
from bb.fetch import runfetchcmd
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
class Git(Fetch):
|
class Git(Fetch):
|
||||||
"""Class to fetch a module or modules from git repositories"""
|
"""Class to fetch a module or modules from git repositories"""
|
||||||
|
@ -153,7 +154,7 @@ class Git(Fetch):
|
||||||
os.chdir(ud.clonedir)
|
os.chdir(ud.clonedir)
|
||||||
mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
|
mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
|
||||||
if mirror_tarballs != "0" or 'fullclone' in ud.parm:
|
if mirror_tarballs != "0" or 'fullclone' in ud.parm:
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
|
logger.info("Creating tarball of git repository")
|
||||||
runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
|
runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
|
||||||
|
|
||||||
if 'fullclone' in ud.parm:
|
if 'fullclone' in ud.parm:
|
||||||
|
@ -185,7 +186,7 @@ class Git(Fetch):
|
||||||
runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
|
runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
|
||||||
|
|
||||||
os.chdir(codir)
|
os.chdir(codir)
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
|
logger.info("Creating tarball of git checkout")
|
||||||
runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
|
runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
|
||||||
|
|
||||||
os.chdir(ud.clonedir)
|
os.chdir(ud.clonedir)
|
||||||
|
@ -238,7 +239,7 @@ class Git(Fetch):
|
||||||
print("no repo")
|
print("no repo")
|
||||||
self.go(None, ud, d)
|
self.go(None, ud, d)
|
||||||
if not os.path.exists(ud.clonedir):
|
if not os.path.exists(ud.clonedir):
|
||||||
bb.msg.error(bb.msg.domain.Fetcher, "GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value" % (url, ud.clonedir))
|
logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@ -250,5 +251,5 @@ class Git(Fetch):
|
||||||
os.chdir(cwd)
|
os.chdir(cwd)
|
||||||
|
|
||||||
buildindex = "%s" % output.split()[0]
|
buildindex = "%s" % output.split()[0]
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, ud.clonedir, buildindex, rev))
|
logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
|
||||||
return buildindex
|
return buildindex
|
||||||
|
|
|
@ -26,12 +26,14 @@ BitBake 'Fetch' implementation for mercurial DRCS (hg).
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch
|
||||||
from bb.fetch import FetchError
|
from bb.fetch import FetchError
|
||||||
from bb.fetch import MissingParameterError
|
from bb.fetch import MissingParameterError
|
||||||
from bb.fetch import runfetchcmd
|
from bb.fetch import runfetchcmd
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
class Hg(Fetch):
|
class Hg(Fetch):
|
||||||
"""Class to fetch a from mercurial repositories"""
|
"""Class to fetch a from mercurial repositories"""
|
||||||
|
@ -116,29 +118,29 @@ class Hg(Fetch):
|
||||||
def go(self, loc, ud, d):
|
def go(self, loc, ud, d):
|
||||||
"""Fetch url"""
|
"""Fetch url"""
|
||||||
|
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
|
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||||
|
|
||||||
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
|
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
|
||||||
updatecmd = self._buildhgcommand(ud, d, "pull")
|
updatecmd = self._buildhgcommand(ud, d, "pull")
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
|
logger.info("Update " + loc)
|
||||||
# update sources there
|
# update sources there
|
||||||
os.chdir(ud.moddir)
|
os.chdir(ud.moddir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
|
logger.debug(1, "Running %s", updatecmd)
|
||||||
runfetchcmd(updatecmd, d)
|
runfetchcmd(updatecmd, d)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
fetchcmd = self._buildhgcommand(ud, d, "fetch")
|
fetchcmd = self._buildhgcommand(ud, d, "fetch")
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
logger.info("Fetch " + loc)
|
||||||
# check out sources there
|
# check out sources there
|
||||||
bb.mkdirhier(ud.pkgdir)
|
bb.mkdirhier(ud.pkgdir)
|
||||||
os.chdir(ud.pkgdir)
|
os.chdir(ud.pkgdir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd)
|
logger.debug(1, "Running %s", fetchcmd)
|
||||||
runfetchcmd(fetchcmd, d)
|
runfetchcmd(fetchcmd, d)
|
||||||
|
|
||||||
# Even when we clone (fetch), we still need to update as hg's clone
|
# Even when we clone (fetch), we still need to update as hg's clone
|
||||||
# won't checkout the specified revision if its on a branch
|
# won't checkout the specified revision if its on a branch
|
||||||
updatecmd = self._buildhgcommand(ud, d, "update")
|
updatecmd = self._buildhgcommand(ud, d, "update")
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
|
logger.debug(1, "Running %s", updatecmd)
|
||||||
runfetchcmd(updatecmd, d)
|
runfetchcmd(updatecmd, d)
|
||||||
|
|
||||||
os.chdir(ud.pkgdir)
|
os.chdir(ud.pkgdir)
|
||||||
|
|
|
@ -66,7 +66,7 @@ class Local(Fetch):
|
||||||
Check the status of the url
|
Check the status of the url
|
||||||
"""
|
"""
|
||||||
if urldata.localpath.find("*") != -1:
|
if urldata.localpath.find("*") != -1:
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url)
|
logger.info("URL %s looks like a glob and was therefore not checked.", url)
|
||||||
return True
|
return True
|
||||||
if os.path.exists(urldata.localpath):
|
if os.path.exists(urldata.localpath):
|
||||||
return True
|
return True
|
||||||
|
|
|
@ -8,6 +8,7 @@ Based on the svn "Fetch" implementation.
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch
|
||||||
|
@ -91,22 +92,22 @@ class Osc(Fetch):
|
||||||
Fetch url
|
Fetch url
|
||||||
"""
|
"""
|
||||||
|
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
|
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||||
|
|
||||||
if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
|
if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
|
||||||
oscupdatecmd = self._buildosccommand(ud, d, "update")
|
oscupdatecmd = self._buildosccommand(ud, d, "update")
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update "+ loc)
|
logger.info("Update "+ loc)
|
||||||
# update sources there
|
# update sources there
|
||||||
os.chdir(ud.moddir)
|
os.chdir(ud.moddir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd)
|
logger.debug(1, "Running %s", oscupdatecmd)
|
||||||
runfetchcmd(oscupdatecmd, d)
|
runfetchcmd(oscupdatecmd, d)
|
||||||
else:
|
else:
|
||||||
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
|
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
logger.info("Fetch " + loc)
|
||||||
# check out sources there
|
# check out sources there
|
||||||
bb.mkdirhier(ud.pkgdir)
|
bb.mkdirhier(ud.pkgdir)
|
||||||
os.chdir(ud.pkgdir)
|
os.chdir(ud.pkgdir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd)
|
logger.debug(1, "Running %s", oscfetchcmd)
|
||||||
runfetchcmd(oscfetchcmd, d)
|
runfetchcmd(oscfetchcmd, d)
|
||||||
|
|
||||||
os.chdir(os.path.join(ud.pkgdir + ud.path))
|
os.chdir(os.path.join(ud.pkgdir + ud.path))
|
||||||
|
|
|
@ -27,10 +27,12 @@ BitBake build tools.
|
||||||
|
|
||||||
from future_builtins import zip
|
from future_builtins import zip
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch
|
||||||
from bb.fetch import FetchError
|
from bb.fetch import FetchError
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
class Perforce(Fetch):
|
class Perforce(Fetch):
|
||||||
def supports(self, url, ud, d):
|
def supports(self, url, ud, d):
|
||||||
|
@ -86,10 +88,10 @@ class Perforce(Fetch):
|
||||||
depot += "@%s" % (p4date)
|
depot += "@%s" % (p4date)
|
||||||
|
|
||||||
p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
|
p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
|
||||||
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
||||||
cset = p4file.readline().strip()
|
cset = p4file.readline().strip()
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
|
logger.debug(1, "READ %s", cset)
|
||||||
if not cset:
|
if not cset:
|
||||||
return -1
|
return -1
|
||||||
|
|
||||||
|
@ -155,13 +157,13 @@ class Perforce(Fetch):
|
||||||
p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
|
p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
|
||||||
|
|
||||||
# create temp directory
|
# create temp directory
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
|
logger.debug(2, "Fetch: creating temporary directory")
|
||||||
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
|
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||||
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
|
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
|
||||||
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
||||||
tmpfile = tmppipe.readline().strip()
|
tmpfile = tmppipe.readline().strip()
|
||||||
if not tmpfile:
|
if not tmpfile:
|
||||||
bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
||||||
raise FetchError(module)
|
raise FetchError(module)
|
||||||
|
|
||||||
if "label" in parm:
|
if "label" in parm:
|
||||||
|
@ -171,12 +173,12 @@ class Perforce(Fetch):
|
||||||
depot = "%s@%s" % (depot, cset)
|
depot = "%s@%s" % (depot, cset)
|
||||||
|
|
||||||
os.chdir(tmpfile)
|
os.chdir(tmpfile)
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
logger.info("Fetch " + loc)
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "%s%s files %s" % (p4cmd, p4opt, depot))
|
logger.info("%s%s files %s", p4cmd, p4opt, depot)
|
||||||
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
|
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
|
||||||
|
|
||||||
if not p4file:
|
if not p4file:
|
||||||
bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to get the P4 files from %s" % (depot))
|
logger.error("Fetch: unable to get the P4 files from %s", depot)
|
||||||
raise FetchError(module)
|
raise FetchError(module)
|
||||||
|
|
||||||
count = 0
|
count = 0
|
||||||
|
@ -194,7 +196,7 @@ class Perforce(Fetch):
|
||||||
count = count + 1
|
count = count + 1
|
||||||
|
|
||||||
if count == 0:
|
if count == 0:
|
||||||
bb.msg.error(bb.msg.domain.Fetcher, "Fetch: No files gathered from the P4 fetch")
|
logger.error("Fetch: No files gathered from the P4 fetch")
|
||||||
raise FetchError(module)
|
raise FetchError(module)
|
||||||
|
|
||||||
myret = os.system("tar -czf %s %s" % (ud.localpath, module))
|
myret = os.system("tar -czf %s %s" % (ud.localpath, module))
|
||||||
|
|
|
@ -72,7 +72,7 @@ class Repo(Fetch):
|
||||||
"""Fetch url"""
|
"""Fetch url"""
|
||||||
|
|
||||||
if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
|
if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping repo init / sync." % ud.localpath)
|
logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
|
||||||
return
|
return
|
||||||
|
|
||||||
gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
|
gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
|
||||||
|
|
|
@ -26,11 +26,13 @@ This implementation is for svk. It is based on the svn implementation
|
||||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch
|
||||||
from bb.fetch import FetchError
|
from bb.fetch import FetchError
|
||||||
from bb.fetch import MissingParameterError
|
from bb.fetch import MissingParameterError
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
class Svk(Fetch):
|
class Svk(Fetch):
|
||||||
"""Class to fetch a module or modules from svk repositories"""
|
"""Class to fetch a module or modules from svk repositories"""
|
||||||
|
@ -72,19 +74,19 @@ class Svk(Fetch):
|
||||||
# create temp directory
|
# create temp directory
|
||||||
localdata = data.createCopy(d)
|
localdata = data.createCopy(d)
|
||||||
data.update_data(localdata)
|
data.update_data(localdata)
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
|
logger.debug(2, "Fetch: creating temporary directory")
|
||||||
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
|
bb.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||||
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
|
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
|
||||||
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
||||||
tmpfile = tmppipe.readline().strip()
|
tmpfile = tmppipe.readline().strip()
|
||||||
if not tmpfile:
|
if not tmpfile:
|
||||||
bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
||||||
raise FetchError(ud.module)
|
raise FetchError(ud.module)
|
||||||
|
|
||||||
# check out sources there
|
# check out sources there
|
||||||
os.chdir(tmpfile)
|
os.chdir(tmpfile)
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
logger.info("Fetch " + loc)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
|
logger.debug(1, "Running %s", svkcmd)
|
||||||
myret = os.system(svkcmd)
|
myret = os.system(svkcmd)
|
||||||
if myret != 0:
|
if myret != 0:
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -25,12 +25,14 @@ BitBake 'Fetch' implementation for svn.
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch
|
||||||
from bb.fetch import FetchError
|
from bb.fetch import FetchError
|
||||||
from bb.fetch import MissingParameterError
|
from bb.fetch import MissingParameterError
|
||||||
from bb.fetch import runfetchcmd
|
from bb.fetch import runfetchcmd
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
class Svn(Fetch):
|
class Svn(Fetch):
|
||||||
"""Class to fetch a module or modules from svn repositories"""
|
"""Class to fetch a module or modules from svn repositories"""
|
||||||
|
@ -136,22 +138,22 @@ class Svn(Fetch):
|
||||||
def go(self, loc, ud, d):
|
def go(self, loc, ud, d):
|
||||||
"""Fetch url"""
|
"""Fetch url"""
|
||||||
|
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
|
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||||
|
|
||||||
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
|
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
|
||||||
svnupdatecmd = self._buildsvncommand(ud, d, "update")
|
svnupdatecmd = self._buildsvncommand(ud, d, "update")
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
|
logger.info("Update " + loc)
|
||||||
# update sources there
|
# update sources there
|
||||||
os.chdir(ud.moddir)
|
os.chdir(ud.moddir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd)
|
logger.debug(1, "Running %s", svnupdatecmd)
|
||||||
runfetchcmd(svnupdatecmd, d)
|
runfetchcmd(svnupdatecmd, d)
|
||||||
else:
|
else:
|
||||||
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
|
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
|
logger.info("Fetch " + loc)
|
||||||
# check out sources there
|
# check out sources there
|
||||||
bb.mkdirhier(ud.pkgdir)
|
bb.mkdirhier(ud.pkgdir)
|
||||||
os.chdir(ud.pkgdir)
|
os.chdir(ud.pkgdir)
|
||||||
bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd)
|
logger.debug(1, "Running %s", svnfetchcmd)
|
||||||
runfetchcmd(svnfetchcmd, d)
|
runfetchcmd(svnfetchcmd, d)
|
||||||
|
|
||||||
os.chdir(ud.pkgdir)
|
os.chdir(ud.pkgdir)
|
||||||
|
@ -179,7 +181,7 @@ class Svn(Fetch):
|
||||||
"""
|
"""
|
||||||
Return the latest upstream revision number
|
Return the latest upstream revision number
|
||||||
"""
|
"""
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
|
logger.debug(2, "SVN fetcher hitting network for %s", url)
|
||||||
|
|
||||||
output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
|
output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
|
||||||
|
|
||||||
|
|
|
@ -26,13 +26,11 @@ BitBake build tools.
|
||||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
import urllib
|
import urllib
|
||||||
from bb import data
|
from bb import data
|
||||||
from bb.fetch import Fetch
|
from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd
|
||||||
from bb.fetch import FetchError
|
|
||||||
from bb.fetch import encodeurl, decodeurl
|
|
||||||
from bb.fetch import runfetchcmd
|
|
||||||
|
|
||||||
class Wget(Fetch):
|
class Wget(Fetch):
|
||||||
"""Class to fetch urls via 'wget'"""
|
"""Class to fetch urls via 'wget'"""
|
||||||
|
@ -69,15 +67,14 @@ class Wget(Fetch):
|
||||||
|
|
||||||
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
|
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
|
||||||
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
|
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
|
||||||
|
logger.info("fetch " + uri)
|
||||||
bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
|
logger.debug(2, "executing " + fetchcmd)
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
|
|
||||||
runfetchcmd(fetchcmd, d)
|
runfetchcmd(fetchcmd, d)
|
||||||
|
|
||||||
# Sanity check since wget can pretend it succeed when it didn't
|
# Sanity check since wget can pretend it succeed when it didn't
|
||||||
# Also, this used to happen if sourceforge sent us to the mirror page
|
# Also, this used to happen if sourceforge sent us to the mirror page
|
||||||
if not os.path.exists(ud.localpath) and not checkonly:
|
if not os.path.exists(ud.localpath) and not checkonly:
|
||||||
bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
|
logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
|
@ -111,19 +111,25 @@ def set_debug_domains(domainargs):
|
||||||
def debug(level, msgdomain, msg, fn = None):
|
def debug(level, msgdomain, msg, fn = None):
|
||||||
level = logging.DEBUG - (level - 1)
|
level = logging.DEBUG - (level - 1)
|
||||||
if not msgdomain:
|
if not msgdomain:
|
||||||
logger.log(level, msg)
|
logger.debug(level, msg)
|
||||||
else:
|
else:
|
||||||
loggers[msgdomain].log(level, msg)
|
loggers[msgdomain].debug(level, msg)
|
||||||
|
|
||||||
def plain(msg, fn = None):
|
def plain(msg, fn = None):
|
||||||
logger.log(logging.INFO + 1, msg)
|
logger.log(logging.INFO + 1, msg)
|
||||||
|
logger.plain(msg)
|
||||||
|
|
||||||
def note(level, msgdomain, msg, fn = None):
|
def note(level, msgdomain, msg, fn = None):
|
||||||
level = logging.INFO - (level - 1)
|
if level > 1:
|
||||||
if not msgdomain:
|
if msgdomain:
|
||||||
logger.log(level, msg)
|
logger.verbose(msg)
|
||||||
else:
|
else:
|
||||||
loggers[msgdomain].log(level, msg)
|
loggers[msgdomain].verbose(msg)
|
||||||
|
else:
|
||||||
|
if msgdomain:
|
||||||
|
logger.info(msg)
|
||||||
|
else:
|
||||||
|
loggers[msgdomain].info(msg)
|
||||||
|
|
||||||
def warn(msgdomain, msg, fn = None):
|
def warn(msgdomain, msg, fn = None):
|
||||||
if not msgdomain:
|
if not msgdomain:
|
||||||
|
|
|
@ -26,9 +26,14 @@ File parsers for the BitBake build tools.
|
||||||
|
|
||||||
handlers = []
|
handlers = []
|
||||||
|
|
||||||
import bb, os
|
import os
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
import bb.utils
|
import bb.utils
|
||||||
import bb.siggen
|
import bb.siggen
|
||||||
|
import bb.utils
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Parsing")
|
||||||
|
|
||||||
class ParseError(Exception):
|
class ParseError(Exception):
|
||||||
"""Exception raised when parsing fails"""
|
"""Exception raised when parsing fails"""
|
||||||
|
@ -91,7 +96,7 @@ def resolve_file(fn, d):
|
||||||
raise IOError("file %s not found in %s" % (fn, bbpath))
|
raise IOError("file %s not found in %s" % (fn, bbpath))
|
||||||
fn = newfn
|
fn = newfn
|
||||||
|
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "LOAD %s" % fn)
|
logger.debug(2, "LOAD %s", fn)
|
||||||
return fn
|
return fn
|
||||||
|
|
||||||
# Used by OpenEmbedded metadata
|
# Used by OpenEmbedded metadata
|
||||||
|
|
|
@ -23,9 +23,13 @@
|
||||||
|
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
from future_builtins import filter
|
from future_builtins import filter
|
||||||
import bb, re, string
|
import re
|
||||||
from bb import methodpool
|
import string
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
import itertools
|
import itertools
|
||||||
|
from bb import methodpool
|
||||||
|
from bb.parse import logger
|
||||||
|
|
||||||
__word__ = re.compile(r"\S+")
|
__word__ = re.compile(r"\S+")
|
||||||
__parsed_methods__ = bb.methodpool.get_parsed_dict()
|
__parsed_methods__ = bb.methodpool.get_parsed_dict()
|
||||||
|
@ -51,7 +55,7 @@ class IncludeNode(AstNode):
|
||||||
Include the file and evaluate the statements
|
Include the file and evaluate the statements
|
||||||
"""
|
"""
|
||||||
s = bb.data.expand(self.what_file, data)
|
s = bb.data.expand(self.what_file, data)
|
||||||
bb.msg.debug(3, bb.msg.domain.Parsing, "CONF %s:%d: including %s" % (self.from_fn, self.from_lineno, s))
|
logger.debug(2, "CONF %s:%s: including %s", self.from_fn, self.from_lineno, s)
|
||||||
|
|
||||||
# TODO: Cache those includes... maybe not here though
|
# TODO: Cache those includes... maybe not here though
|
||||||
if self.force:
|
if self.force:
|
||||||
|
@ -365,7 +369,7 @@ def _expand_versions(versions):
|
||||||
def multi_finalize(fn, d):
|
def multi_finalize(fn, d):
|
||||||
appends = (d.getVar("__BBAPPEND", True) or "").split()
|
appends = (d.getVar("__BBAPPEND", True) or "").split()
|
||||||
for append in appends:
|
for append in appends:
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "Appending .bbappend file " + append + " to " + fn)
|
logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
|
||||||
bb.parse.BBHandler.handle(append, d, True)
|
bb.parse.BBHandler.handle(append, d, True)
|
||||||
|
|
||||||
safe_d = d
|
safe_d = d
|
||||||
|
|
|
@ -27,11 +27,12 @@
|
||||||
|
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
import re, bb, os
|
import re, bb, os
|
||||||
|
import logging
|
||||||
import bb.fetch, bb.build, bb.utils
|
import bb.fetch, bb.build, bb.utils
|
||||||
from bb import data
|
from bb import data
|
||||||
|
|
||||||
from . import ConfHandler
|
from . import ConfHandler
|
||||||
from .. import resolve_file, ast
|
from .. import resolve_file, ast, logger
|
||||||
from .ConfHandler import include, init
|
from .ConfHandler import include, init
|
||||||
|
|
||||||
# For compatibility
|
# For compatibility
|
||||||
|
@ -76,7 +77,7 @@ def inherit(files, d):
|
||||||
file = os.path.join('classes', '%s.bbclass' % file)
|
file = os.path.join('classes', '%s.bbclass' % file)
|
||||||
|
|
||||||
if not file in __inherit_cache:
|
if not file in __inherit_cache:
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file))
|
logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
|
||||||
__inherit_cache.append( file )
|
__inherit_cache.append( file )
|
||||||
data.setVar('__inherit_cache', __inherit_cache, d)
|
data.setVar('__inherit_cache', __inherit_cache, d)
|
||||||
include(fn, file, d, "inherit")
|
include(fn, file, d, "inherit")
|
||||||
|
@ -115,9 +116,9 @@ def handle(fn, d, include):
|
||||||
|
|
||||||
|
|
||||||
if include == 0:
|
if include == 0:
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
|
logger.debug(2, "BB %s: handle(data)", fn)
|
||||||
else:
|
else:
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")
|
logger.debug(2, "BB %s: handle(data, include)", fn)
|
||||||
|
|
||||||
(root, ext) = os.path.splitext(os.path.basename(fn))
|
(root, ext) = os.path.splitext(os.path.basename(fn))
|
||||||
base_name = "%s%s" % (root, ext)
|
base_name = "%s%s" % (root, ext)
|
||||||
|
|
|
@ -25,8 +25,9 @@
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
import re, bb.data, os
|
import re, bb.data, os
|
||||||
|
import logging
|
||||||
import bb.utils
|
import bb.utils
|
||||||
from bb.parse import ParseError, resolve_file, ast
|
from bb.parse import ParseError, resolve_file, ast, logger
|
||||||
|
|
||||||
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
||||||
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
|
||||||
|
@ -68,7 +69,7 @@ def include(oldfn, fn, data, error_out):
|
||||||
except IOError:
|
except IOError:
|
||||||
if error_out:
|
if error_out:
|
||||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
|
raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
|
||||||
bb.msg.debug(2, bb.msg.domain.Parsing, "CONF file '%s' not found" % fn)
|
logger.debug(2, "CONF file '%s' not found", fn)
|
||||||
|
|
||||||
def handle(fn, data, include):
|
def handle(fn, data, include):
|
||||||
init(data)
|
init(data)
|
||||||
|
|
|
@ -15,9 +15,13 @@
|
||||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
import bb, os
|
import os
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
import bb.utils
|
import bb.utils
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.PersistData")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import sqlite3
|
import sqlite3
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
@ -56,7 +60,7 @@ class PersistData:
|
||||||
bb.utils.mkdirhier(self.cachedir)
|
bb.utils.mkdirhier(self.cachedir)
|
||||||
|
|
||||||
self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
|
self.cachefile = os.path.join(self.cachedir, "bb_persist_data.sqlite3")
|
||||||
bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)
|
logger.debug(1, "Using '%s' as the persistent data cache", self.cachefile)
|
||||||
|
|
||||||
connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
|
connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)
|
||||||
persistent_database_connection["connection"] = connection
|
persistent_database_connection["connection"] = connection
|
||||||
|
|
|
@ -22,9 +22,12 @@
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
import logging
|
||||||
from bb import data, utils
|
from bb import data, utils
|
||||||
import bb
|
import bb
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Provider")
|
||||||
|
|
||||||
class NoProvider(Exception):
|
class NoProvider(Exception):
|
||||||
"""Exception raised when no provider of a build dependency can be found"""
|
"""Exception raised when no provider of a build dependency can be found"""
|
||||||
|
|
||||||
|
@ -120,9 +123,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
|
||||||
if item:
|
if item:
|
||||||
itemstr = " (for item %s)" % item
|
itemstr = " (for item %s)" % item
|
||||||
if preferred_file is None:
|
if preferred_file is None:
|
||||||
bb.msg.note(1, bb.msg.domain.Provider, "preferred version %s of %s not available%s" % (pv_str, pn, itemstr))
|
logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
|
||||||
else:
|
else:
|
||||||
bb.msg.debug(1, bb.msg.domain.Provider, "selecting %s as PREFERRED_VERSION %s of package %s%s" % (preferred_file, pv_str, pn, itemstr))
|
logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
|
||||||
|
|
||||||
return (preferred_ver, preferred_file)
|
return (preferred_ver, preferred_file)
|
||||||
|
|
||||||
|
@ -189,7 +192,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
|
||||||
pkg_pn[pn] = []
|
pkg_pn[pn] = []
|
||||||
pkg_pn[pn].append(p)
|
pkg_pn[pn].append(p)
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys()))
|
logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
|
||||||
|
|
||||||
# First add PREFERRED_VERSIONS
|
# First add PREFERRED_VERSIONS
|
||||||
for pn in pkg_pn:
|
for pn in pkg_pn:
|
||||||
|
@ -206,7 +209,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
|
||||||
eligible.append(preferred_versions[pn][1])
|
eligible.append(preferred_versions[pn][1])
|
||||||
|
|
||||||
if len(eligible) == 0:
|
if len(eligible) == 0:
|
||||||
bb.msg.error(bb.msg.domain.Provider, "no eligible providers for %s" % item)
|
logger.error("no eligible providers for %s", item)
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
# If pn == item, give it a slight default preference
|
# If pn == item, give it a slight default preference
|
||||||
|
@ -242,13 +245,13 @@ def filterProviders(providers, item, cfgData, dataCache):
|
||||||
for p in eligible:
|
for p in eligible:
|
||||||
pn = dataCache.pkg_fn[p]
|
pn = dataCache.pkg_fn[p]
|
||||||
if dataCache.preferred[item] == pn:
|
if dataCache.preferred[item] == pn:
|
||||||
bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy %s due to PREFERRED_PROVIDERS" % (pn, item))
|
logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
|
||||||
eligible.remove(p)
|
eligible.remove(p)
|
||||||
eligible = [p] + eligible
|
eligible = [p] + eligible
|
||||||
foundUnique = True
|
foundUnique = True
|
||||||
break
|
break
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))
|
logger.debug(1, "sorted providers for %s are: %s", item, eligible)
|
||||||
|
|
||||||
return eligible, foundUnique
|
return eligible, foundUnique
|
||||||
|
|
||||||
|
@ -268,11 +271,11 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
|
||||||
pn = dataCache.pkg_fn[p]
|
pn = dataCache.pkg_fn[p]
|
||||||
provides = dataCache.pn_provides[pn]
|
provides = dataCache.pn_provides[pn]
|
||||||
for provide in provides:
|
for provide in provides:
|
||||||
bb.msg.note(2, bb.msg.domain.Provider, "checking PREFERRED_PROVIDER_%s" % (provide))
|
logger.verbose("checking PREFERRED_PROVIDER_%s", provide)
|
||||||
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
|
prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
|
||||||
if prefervar == pn:
|
if prefervar == pn:
|
||||||
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
|
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
|
||||||
bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy runtime %s due to %s" % (pn, item, var))
|
logger.verbose("selecting %s to satisfy runtime %s due to %s", pn, item, var)
|
||||||
preferred_vars.append(var)
|
preferred_vars.append(var)
|
||||||
eligible.remove(p)
|
eligible.remove(p)
|
||||||
eligible = [p] + eligible
|
eligible = [p] + eligible
|
||||||
|
@ -282,9 +285,9 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
|
||||||
numberPreferred = len(preferred)
|
numberPreferred = len(preferred)
|
||||||
|
|
||||||
if numberPreferred > 1:
|
if numberPreferred > 1:
|
||||||
bb.msg.error(bb.msg.domain.Provider, "Conflicting PREFERRED_PROVIDER entries were found which resulted in an attempt to select multiple providers (%s) for runtime dependecy %s\nThe entries resulting in this conflict were: %s" % (preferred, item, preferred_vars))
|
logger.error("Conflicting PREFERRED_PROVIDER entries were found which resulted in an attempt to select multiple providers (%s) for runtime dependecy %s\nThe entries resulting in this conflict were: %s", preferred, item, preferred_vars)
|
||||||
|
|
||||||
bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))
|
logger.debug(1, "sorted providers for %s are: %s", item, eligible)
|
||||||
|
|
||||||
return eligible, numberPreferred
|
return eligible, numberPreferred
|
||||||
|
|
||||||
|
@ -314,7 +317,7 @@ def getRuntimeProviders(dataCache, rdepend):
|
||||||
try:
|
try:
|
||||||
regexp = re.compile(pattern)
|
regexp = re.compile(pattern)
|
||||||
except:
|
except:
|
||||||
bb.msg.error(bb.msg.domain.Provider, "Error parsing re expression: %s" % pattern)
|
logger.error("Error parsing regular expression '%s'", pattern)
|
||||||
raise
|
raise
|
||||||
regexp_cache[pattern] = regexp
|
regexp_cache[pattern] = regexp
|
||||||
if regexp.match(rdepend):
|
if regexp.match(rdepend):
|
||||||
|
|
|
@ -22,22 +22,25 @@ Handles preparation and execution of a queue of tasks
|
||||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
import bb, os, sys
|
import os
|
||||||
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
import signal
|
import signal
|
||||||
import stat
|
import stat
|
||||||
import fcntl
|
import fcntl
|
||||||
import copy
|
import copy
|
||||||
import logging
|
import logging
|
||||||
|
import bb
|
||||||
from bb import msg, data, event
|
from bb import msg, data, event
|
||||||
|
|
||||||
bblogger = logging.getLogger("BitBake")
|
bblogger = logging.getLogger("BitBake")
|
||||||
|
logger = logging.getLogger("BitBake.RunQueue")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import cPickle as pickle
|
import cPickle as pickle
|
||||||
except ImportError:
|
except ImportError:
|
||||||
import pickle
|
import pickle
|
||||||
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
|
logger.info("Importing cPickle failed. Falling back to a very slow implementation.")
|
||||||
|
|
||||||
class RunQueueStats:
|
class RunQueueStats:
|
||||||
"""
|
"""
|
||||||
|
@ -363,14 +366,14 @@ class RunQueueData:
|
||||||
for task in range(numTasks):
|
for task in range(numTasks):
|
||||||
if task_done[task] is False or deps_left[task] != 0:
|
if task_done[task] is False or deps_left[task] != 0:
|
||||||
problem_tasks.append(task)
|
problem_tasks.append(task)
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s) is not buildable\n" % (task, self.get_user_idstring(task)))
|
logger.debug(2, "Task %s (%s) is not buildable\n", task, self.get_user_idstring(task))
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "(Complete marker was %s and the remaining dependency count was %s)\n\n" % (task_done[task], deps_left[task]))
|
logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n\n", task_done[task], deps_left[task])
|
||||||
|
|
||||||
if problem_tasks:
|
if problem_tasks:
|
||||||
message = "Unbuildable tasks were found.\n"
|
message = "Unbuildable tasks were found.\n"
|
||||||
message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
|
message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
|
||||||
message = message + "Identifying dependency loops (this may take a short while)...\n"
|
message = message + "Identifying dependency loops (this may take a short while)...\n"
|
||||||
bb.msg.error(bb.msg.domain.RunQueue, message)
|
logger.error(message)
|
||||||
|
|
||||||
msgs = self.circular_depchains_handler(problem_tasks)
|
msgs = self.circular_depchains_handler(problem_tasks)
|
||||||
|
|
||||||
|
@ -398,7 +401,7 @@ class RunQueueData:
|
||||||
# Nothing to do
|
# Nothing to do
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue, "Preparing runqueue")
|
logger.info("Preparing runqueue")
|
||||||
|
|
||||||
# Step A - Work out a list of tasks to run
|
# Step A - Work out a list of tasks to run
|
||||||
#
|
#
|
||||||
|
@ -445,7 +448,7 @@ class RunQueueData:
|
||||||
fn = taskData.fn_index[fnid]
|
fn = taskData.fn_index[fnid]
|
||||||
task_deps = self.dataCache.task_deps[fn]
|
task_deps = self.dataCache.task_deps[fn]
|
||||||
|
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Processing %s:%s" %(fn, taskData.tasks_name[task]))
|
logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
|
||||||
|
|
||||||
if fnid not in taskData.failed_fnids:
|
if fnid not in taskData.failed_fnids:
|
||||||
|
|
||||||
|
@ -505,7 +508,7 @@ class RunQueueData:
|
||||||
# Rmove all self references
|
# Rmove all self references
|
||||||
if task in depends:
|
if task in depends:
|
||||||
newdep = []
|
newdep = []
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s %s) contains self reference! %s" % (task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends))
|
logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)
|
||||||
for dep in depends:
|
for dep in depends:
|
||||||
if task != dep:
|
if task != dep:
|
||||||
newdep.append(dep)
|
newdep.append(dep)
|
||||||
|
@ -567,7 +570,7 @@ class RunQueueData:
|
||||||
# as active too. If the task is to be 'forced', clear its stamp. Once
|
# as active too. If the task is to be 'forced', clear its stamp. Once
|
||||||
# all active tasks are marked, prune the ones we don't need.
|
# all active tasks are marked, prune the ones we don't need.
|
||||||
|
|
||||||
bb.msg.note(2, bb.msg.domain.RunQueue, "Marking Active Tasks")
|
logger.verbose("Marking Active Tasks")
|
||||||
|
|
||||||
def mark_active(listid, depth):
|
def mark_active(listid, depth):
|
||||||
"""
|
"""
|
||||||
|
@ -600,7 +603,7 @@ class RunQueueData:
|
||||||
|
|
||||||
# Remove stamps for targets if force mode active
|
# Remove stamps for targets if force mode active
|
||||||
if self.cooker.configuration.force:
|
if self.cooker.configuration.force:
|
||||||
bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (target[1], fn))
|
logger.verbose("Remove stamp %s, %s", target[1], fn)
|
||||||
bb.build.del_stamp(target[1], self.dataCache, fn)
|
bb.build.del_stamp(target[1], self.dataCache, fn)
|
||||||
|
|
||||||
if fnid in taskData.failed_fnids:
|
if fnid in taskData.failed_fnids:
|
||||||
|
@ -643,7 +646,7 @@ class RunQueueData:
|
||||||
else:
|
else:
|
||||||
bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.")
|
bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.")
|
||||||
|
|
||||||
bb.msg.note(2, bb.msg.domain.RunQueue, "Pruned %s inactive tasks, %s left" % (delcount, len(self.runq_fnid)))
|
logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
|
||||||
|
|
||||||
# Remap the dependencies to account for the deleted tasks
|
# Remap the dependencies to account for the deleted tasks
|
||||||
# Check we didn't delete a task we depend on
|
# Check we didn't delete a task we depend on
|
||||||
|
@ -656,7 +659,7 @@ class RunQueueData:
|
||||||
newdeps.append(maps[origdep])
|
newdeps.append(maps[origdep])
|
||||||
self.runq_depends[listid] = set(newdeps)
|
self.runq_depends[listid] = set(newdeps)
|
||||||
|
|
||||||
bb.msg.note(2, bb.msg.domain.RunQueue, "Assign Weightings")
|
logger.verbose("Assign Weightings")
|
||||||
|
|
||||||
# Generate a list of reverse dependencies to ease future calculations
|
# Generate a list of reverse dependencies to ease future calculations
|
||||||
for listid in range(len(self.runq_fnid)):
|
for listid in range(len(self.runq_fnid)):
|
||||||
|
@ -675,7 +678,7 @@ class RunQueueData:
|
||||||
#self.dump_data(taskData)
|
#self.dump_data(taskData)
|
||||||
bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
|
bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
|
||||||
|
|
||||||
bb.msg.note(2, bb.msg.domain.RunQueue, "Compute totals (have %s endpoint(s))" % len(endpoints))
|
logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
|
||||||
|
|
||||||
# Calculate task weights
|
# Calculate task weights
|
||||||
# Check of higher length circular dependencies
|
# Check of higher length circular dependencies
|
||||||
|
@ -698,9 +701,7 @@ class RunQueueData:
|
||||||
for prov in prov_list:
|
for prov in prov_list:
|
||||||
if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
|
if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
|
||||||
error = True
|
error = True
|
||||||
bb.msg.error(bb.msg.domain.RunQueue, "Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should." % (prov, " ".join(prov_list[prov])))
|
logger.error("Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should.", prov, " ".join(prov_list[prov]))
|
||||||
#if error:
|
|
||||||
# bb.msg.fatal(bb.msg.domain.RunQueue, "Corrupted metadata configuration detected, aborting...")
|
|
||||||
|
|
||||||
|
|
||||||
# Create a whitelist usable by the stamp checks
|
# Create a whitelist usable by the stamp checks
|
||||||
|
@ -762,20 +763,20 @@ class RunQueueData:
|
||||||
"""
|
"""
|
||||||
Dump some debug information on the internal data structures
|
Dump some debug information on the internal data structures
|
||||||
"""
|
"""
|
||||||
bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:")
|
logger.debug(3, "run_tasks:")
|
||||||
for task in range(len(self.rqdata.runq_task)):
|
for task in range(len(self.rqdata.runq_task)):
|
||||||
bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
|
logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
|
||||||
taskQueue.fn_index[self.rqdata.runq_fnid[task]],
|
taskQueue.fn_index[self.rqdata.runq_fnid[task]],
|
||||||
self.rqdata.runq_task[task],
|
self.rqdata.runq_task[task],
|
||||||
self.rqdata.runq_weight[task],
|
self.rqdata.runq_weight[task],
|
||||||
self.rqdata.runq_depends[task],
|
self.rqdata.runq_depends[task],
|
||||||
self.rqdata.runq_revdeps[task]))
|
self.rqdata.runq_revdeps[task]))
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:")
|
logger.debug(3, "sorted_tasks:")
|
||||||
for task1 in range(len(self.rqdata.runq_task)):
|
for task1 in range(len(self.rqdata.runq_task)):
|
||||||
if task1 in self.prio_map:
|
if task1 in self.prio_map:
|
||||||
task = self.prio_map[task1]
|
task = self.prio_map[task1]
|
||||||
bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
|
logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
|
||||||
taskQueue.fn_index[self.rqdata.runq_fnid[task]],
|
taskQueue.fn_index[self.rqdata.runq_fnid[task]],
|
||||||
self.rqdata.runq_task[task],
|
self.rqdata.runq_task[task],
|
||||||
self.rqdata.runq_weight[task],
|
self.rqdata.runq_weight[task],
|
||||||
|
@ -913,12 +914,12 @@ class RunQueue:
|
||||||
|
|
||||||
# If the stamp is missing its not current
|
# If the stamp is missing its not current
|
||||||
if not os.access(stampfile, os.F_OK):
|
if not os.access(stampfile, os.F_OK):
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s not available\n" % stampfile)
|
logger.debug(2, "Stampfile %s not available\n" % stampfile)
|
||||||
return False
|
return False
|
||||||
# If its a 'nostamp' task, it's not current
|
# If its a 'nostamp' task, it's not current
|
||||||
taskdep = self.rqdata.dataCache.task_deps[fn]
|
taskdep = self.rqdata.dataCache.task_deps[fn]
|
||||||
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
|
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "%s.%s is nostamp\n" % (fn, taskname))
|
logger.debug(2, "%s.%s is nostamp\n" % (fn, taskname))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if taskname.endswith("_setscene"):
|
if taskname.endswith("_setscene"):
|
||||||
|
@ -938,10 +939,10 @@ class RunQueue:
|
||||||
continue
|
continue
|
||||||
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
|
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
|
||||||
if not t2:
|
if not t2:
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s does not exist" % (stampfile2))
|
logger.debug(2, "Stampfile %s does not exist" % (stampfile2))
|
||||||
iscurrent = False
|
iscurrent = False
|
||||||
if t1 < t2:
|
if t1 < t2:
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s < %s" % (stampfile, stampfile2))
|
logger.debug(2, "Stampfile %s < %s" % (stampfile, stampfile2))
|
||||||
iscurrent = False
|
iscurrent = False
|
||||||
|
|
||||||
return iscurrent
|
return iscurrent
|
||||||
|
@ -972,7 +973,7 @@ class RunQueue:
|
||||||
retval = self.rqexe.execute()
|
retval = self.rqexe.execute()
|
||||||
|
|
||||||
if self.state is runQueueRunInit:
|
if self.state is runQueueRunInit:
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue, "Executing RunQueue Tasks")
|
logger.info("Executing RunQueue Tasks")
|
||||||
self.rqexe = RunQueueExecuteTasks(self)
|
self.rqexe = RunQueueExecuteTasks(self)
|
||||||
self.state = runQueueRunning
|
self.state = runQueueRunning
|
||||||
|
|
||||||
|
@ -991,7 +992,7 @@ class RunQueue:
|
||||||
|
|
||||||
if self.state is runQueueComplete:
|
if self.state is runQueueComplete:
|
||||||
# All done
|
# All done
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue, "Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed))
|
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if self.state is runQueueChildProcess:
|
if self.state is runQueueChildProcess:
|
||||||
|
@ -1059,7 +1060,7 @@ class RunQueueExecute:
|
||||||
|
|
||||||
def finish_now(self):
|
def finish_now(self):
|
||||||
if self.stats.active:
|
if self.stats.active:
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGTERM to remaining %s tasks" % self.stats.active)
|
logger.info("Sending SIGTERM to remaining %s tasks", self.stats.active)
|
||||||
for k, v in self.build_pids.iteritems():
|
for k, v in self.build_pids.iteritems():
|
||||||
try:
|
try:
|
||||||
os.kill(-k, signal.SIGTERM)
|
os.kill(-k, signal.SIGTERM)
|
||||||
|
@ -1101,7 +1102,7 @@ class RunQueueExecute:
|
||||||
fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split()
|
fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split()
|
||||||
for p in fakedirs:
|
for p in fakedirs:
|
||||||
bb.mkdirhier(p)
|
bb.mkdirhier(p)
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
|
logger.debug(2, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
|
||||||
|
|
||||||
env['PATH'] = self.cooker.configuration.initial_path
|
env['PATH'] = self.cooker.configuration.initial_path
|
||||||
|
|
||||||
|
@ -1220,7 +1221,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
for scheduler in self.rqdata.schedulers:
|
for scheduler in self.rqdata.schedulers:
|
||||||
if self.scheduler == scheduler.name:
|
if self.scheduler == scheduler.name:
|
||||||
self.sched = scheduler(self, self.rqdata)
|
self.sched = scheduler(self, self.rqdata)
|
||||||
bb.msg.debug(1, bb.msg.domain.RunQueue, "Using runqueue scheduler '%s'" % scheduler.name)
|
logger.debug(1, "Using runqueue scheduler '%s'" % scheduler.name)
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
|
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
|
||||||
|
@ -1246,7 +1247,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
self.runq_buildable[revdep] = 1
|
self.runq_buildable[revdep] = 1
|
||||||
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
|
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
|
||||||
taskname = self.rqdata.runq_task[revdep]
|
taskname = self.rqdata.runq_task[revdep]
|
||||||
bb.msg.debug(1, bb.msg.domain.RunQueue, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
|
logger.debug(1, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
|
||||||
|
|
||||||
def task_complete(self, task):
|
def task_complete(self, task):
|
||||||
self.stats.taskCompleted()
|
self.stats.taskCompleted()
|
||||||
|
@ -1258,7 +1259,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
Called when a task has failed
|
Called when a task has failed
|
||||||
Updates the state engine with the failure
|
Updates the state engine with the failure
|
||||||
"""
|
"""
|
||||||
bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed with %s" % (task, self.rqdata.get_user_idstring(task), exitcode))
|
logger.error("Task %s (%s) failed with %s" % (task, self.rqdata.get_user_idstring(task), exitcode))
|
||||||
self.stats.taskFailed()
|
self.stats.taskFailed()
|
||||||
fnid = self.rqdata.runq_fnid[task]
|
fnid = self.rqdata.runq_fnid[task]
|
||||||
self.failed_fnids.append(fnid)
|
self.failed_fnids.append(fnid)
|
||||||
|
@ -1287,7 +1288,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
|
|
||||||
taskname = self.rqdata.runq_task[task]
|
taskname = self.rqdata.runq_task[task]
|
||||||
if self.rq.check_stamp_task(task, taskname):
|
if self.rq.check_stamp_task(task, taskname):
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.rqdata.get_user_idstring(task)))
|
logger.debug(2, "Stamp current task %s (%s)" % (task, self.rqdata.get_user_idstring(task)))
|
||||||
self.task_skip(task)
|
self.task_skip(task)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -1295,8 +1296,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
|
|
||||||
taskdep = self.rqdata.dataCache.task_deps[fn]
|
taskdep = self.rqdata.dataCache.task_deps[fn]
|
||||||
if 'noexec' in taskdep and taskname in taskdep['noexec']:
|
if 'noexec' in taskdep and taskname in taskdep['noexec']:
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue,
|
logger.info("Noexec task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
|
||||||
"Noexec task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
|
|
||||||
self.stats.total,
|
self.stats.total,
|
||||||
task,
|
task,
|
||||||
self.rqdata.get_user_idstring(task)))
|
self.rqdata.get_user_idstring(task)))
|
||||||
|
@ -1306,8 +1306,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
self.task_complete(task)
|
self.task_complete(task)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue,
|
logger.info("Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
|
||||||
"Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
|
|
||||||
self.stats.total,
|
self.stats.total,
|
||||||
task,
|
task,
|
||||||
self.rqdata.get_user_idstring(task)))
|
self.rqdata.get_user_idstring(task)))
|
||||||
|
@ -1334,11 +1333,11 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
# Sanity Checks
|
# Sanity Checks
|
||||||
for task in range(self.stats.total):
|
for task in range(self.stats.total):
|
||||||
if self.runq_buildable[task] == 0:
|
if self.runq_buildable[task] == 0:
|
||||||
bb.msg.error(bb.msg.domain.RunQueue, "Task %s never buildable!" % task)
|
logger.error("Task %s never buildable!" % task)
|
||||||
if self.runq_running[task] == 0:
|
if self.runq_running[task] == 0:
|
||||||
bb.msg.error(bb.msg.domain.RunQueue, "Task %s never ran!" % task)
|
logger.error("Task %s never ran!" % task)
|
||||||
if self.runq_complete[task] == 0:
|
if self.runq_complete[task] == 0:
|
||||||
bb.msg.error(bb.msg.domain.RunQueue, "Task %s never completed!" % task)
|
logger.error("Task %s never completed!" % task)
|
||||||
self.rq.state = runQueueComplete
|
self.rq.state = runQueueComplete
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -1461,12 +1460,12 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||||
|
|
||||||
for task in range(len(self.sq_revdeps)):
|
for task in range(len(self.sq_revdeps)):
|
||||||
if task not in valid_new and task not in noexec:
|
if task not in valid_new and task not in noexec:
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "No package found so skipping setscene task %s" % (self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
|
logger.debug(2, "No package found so skipping setscene task %s" % (self.rqdata.get_user_idstring(self.rqdata.runq_setscene[task])))
|
||||||
self.task_failoutright(task)
|
self.task_failoutright(task)
|
||||||
|
|
||||||
#print(str(valid))
|
#print(str(valid))
|
||||||
|
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue, "Executing SetScene Tasks")
|
logger.info("Executing SetScene Tasks")
|
||||||
|
|
||||||
self.rq.state = runQueueSceneRun
|
self.rq.state = runQueueSceneRun
|
||||||
|
|
||||||
|
@ -1484,7 +1483,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
index = self.rqdata.runq_setscene[task]
|
index = self.rqdata.runq_setscene[task]
|
||||||
bb.msg.debug(1, bb.msg.domain.RunQueue, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index))
|
logger.debug(1, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index))
|
||||||
|
|
||||||
self.scenequeue_covered.add(task)
|
self.scenequeue_covered.add(task)
|
||||||
self.scenequeue_updatecounters(task)
|
self.scenequeue_updatecounters(task)
|
||||||
|
@ -1539,7 +1538,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||||
|
|
||||||
taskname = self.rqdata.runq_task[realtask] + "_setscene"
|
taskname = self.rqdata.runq_task[realtask] + "_setscene"
|
||||||
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]):
|
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]):
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp for underlying task %s (%s) is current so skipping setscene varient" % (task, self.rqdata.get_user_idstring(task)))
|
logger.debug(2, "Stamp for underlying task %s (%s) is current so skipping setscene varient" % (task, self.rqdata.get_user_idstring(task)))
|
||||||
self.task_failoutright(task)
|
self.task_failoutright(task)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
@ -1550,12 +1549,11 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
if self.rq.check_stamp_task(realtask, taskname):
|
if self.rq.check_stamp_task(realtask, taskname):
|
||||||
bb.msg.debug(2, bb.msg.domain.RunQueue, "Setscene stamp current task %s (%s) so skip it and its dependencies" % (task, self.rqdata.get_user_idstring(realtask)))
|
logger.debug(2, "Setscene stamp current task %s (%s) so skip it and its dependencies" % (task, self.rqdata.get_user_idstring(realtask)))
|
||||||
self.task_skip(task)
|
self.task_skip(task)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
bb.msg.note(1, bb.msg.domain.RunQueue,
|
logger.info("Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
|
||||||
"Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
|
|
||||||
self.stats.total, fn, taskname))
|
self.stats.total, fn, taskname))
|
||||||
|
|
||||||
pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)
|
pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)
|
||||||
|
|
|
@ -23,8 +23,11 @@ Task data collection and handling
|
||||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
|
import logging
|
||||||
import bb
|
import bb
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.TaskData")
|
||||||
|
|
||||||
def re_match_strings(target, strings):
|
def re_match_strings(target, strings):
|
||||||
"""
|
"""
|
||||||
Whether or not the string 'target' matches
|
Whether or not the string 'target' matches
|
||||||
|
@ -182,7 +185,7 @@ class TaskData:
|
||||||
if not fnid in self.depids:
|
if not fnid in self.depids:
|
||||||
dependids = {}
|
dependids = {}
|
||||||
for depend in dataCache.deps[fn]:
|
for depend in dataCache.deps[fn]:
|
||||||
bb.msg.debug(2, bb.msg.domain.TaskData, "Added dependency %s for %s" % (depend, fn))
|
logger.debug(2, "Added dependency %s for %s", depend, fn)
|
||||||
dependids[self.getbuild_id(depend)] = None
|
dependids[self.getbuild_id(depend)] = None
|
||||||
self.depids[fnid] = dependids.keys()
|
self.depids[fnid] = dependids.keys()
|
||||||
|
|
||||||
|
@ -193,11 +196,11 @@ class TaskData:
|
||||||
rrecs = dataCache.runrecs[fn]
|
rrecs = dataCache.runrecs[fn]
|
||||||
for package in rdepends:
|
for package in rdepends:
|
||||||
for rdepend in bb.utils.explode_deps(rdepends[package]):
|
for rdepend in bb.utils.explode_deps(rdepends[package]):
|
||||||
bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime dependency %s for %s" % (rdepend, fn))
|
logger.debug(2, "Added runtime dependency %s for %s", rdepend, fn)
|
||||||
rdependids[self.getrun_id(rdepend)] = None
|
rdependids[self.getrun_id(rdepend)] = None
|
||||||
for package in rrecs:
|
for package in rrecs:
|
||||||
for rdepend in bb.utils.explode_deps(rrecs[package]):
|
for rdepend in bb.utils.explode_deps(rrecs[package]):
|
||||||
bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime recommendation %s for %s" % (rdepend, fn))
|
logger.debug(2, "Added runtime recommendation %s for %s", rdepend, fn)
|
||||||
rdependids[self.getrun_id(rdepend)] = None
|
rdependids[self.getrun_id(rdepend)] = None
|
||||||
self.rdepids[fnid] = rdependids.keys()
|
self.rdepids[fnid] = rdependids.keys()
|
||||||
|
|
||||||
|
@ -397,7 +400,7 @@ class TaskData:
|
||||||
fnid = self.getfn_id(fn)
|
fnid = self.getfn_id(fn)
|
||||||
if fnid in self.failed_fnids:
|
if fnid in self.failed_fnids:
|
||||||
continue
|
continue
|
||||||
bb.msg.debug(2, bb.msg.domain.Provider, "adding %s to satisfy %s" % (fn, item))
|
logger.debug(2, "adding %s to satisfy %s", fn, item)
|
||||||
self.add_build_target(fn, item)
|
self.add_build_target(fn, item)
|
||||||
self.add_tasks(fn, dataCache)
|
self.add_tasks(fn, dataCache)
|
||||||
|
|
||||||
|
@ -450,7 +453,7 @@ class TaskData:
|
||||||
fnid = self.getfn_id(fn)
|
fnid = self.getfn_id(fn)
|
||||||
if fnid in self.failed_fnids:
|
if fnid in self.failed_fnids:
|
||||||
continue
|
continue
|
||||||
bb.msg.debug(2, bb.msg.domain.Provider, "adding '%s' to satisfy runtime '%s'" % (fn, item))
|
logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
|
||||||
self.add_runtime_target(fn, item)
|
self.add_runtime_target(fn, item)
|
||||||
self.add_tasks(fn, dataCache)
|
self.add_tasks(fn, dataCache)
|
||||||
|
|
||||||
|
@ -463,7 +466,7 @@ class TaskData:
|
||||||
"""
|
"""
|
||||||
if fnid in self.failed_fnids:
|
if fnid in self.failed_fnids:
|
||||||
return
|
return
|
||||||
bb.msg.debug(1, bb.msg.domain.Provider, "File '%s' is unbuildable, removing..." % self.fn_index[fnid])
|
logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
|
||||||
self.failed_fnids.append(fnid)
|
self.failed_fnids.append(fnid)
|
||||||
for target in self.build_targets:
|
for target in self.build_targets:
|
||||||
if fnid in self.build_targets[target]:
|
if fnid in self.build_targets[target]:
|
||||||
|
@ -485,7 +488,7 @@ class TaskData:
|
||||||
missing_list = [self.build_names_index[targetid]]
|
missing_list = [self.build_names_index[targetid]]
|
||||||
else:
|
else:
|
||||||
missing_list = [self.build_names_index[targetid]] + missing_list
|
missing_list = [self.build_names_index[targetid]] + missing_list
|
||||||
bb.msg.note(2, bb.msg.domain.Provider, "Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list))
|
providerlog.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
|
||||||
self.failed_deps.append(targetid)
|
self.failed_deps.append(targetid)
|
||||||
dependees = self.get_dependees(targetid)
|
dependees = self.get_dependees(targetid)
|
||||||
for fnid in dependees:
|
for fnid in dependees:
|
||||||
|
@ -498,7 +501,7 @@ class TaskData:
|
||||||
|
|
||||||
if self.abort and targetid in self.external_targets:
|
if self.abort and targetid in self.external_targets:
|
||||||
target = self.build_names_index[targetid]
|
target = self.build_names_index[targetid]
|
||||||
bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (target, missing_list))
|
providerlog.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
|
||||||
raise bb.providers.NoProvider(target)
|
raise bb.providers.NoProvider(target)
|
||||||
|
|
||||||
def remove_runtarget(self, targetid, missing_list = []):
|
def remove_runtarget(self, targetid, missing_list = []):
|
||||||
|
@ -511,7 +514,7 @@ class TaskData:
|
||||||
else:
|
else:
|
||||||
missing_list = [self.run_names_index[targetid]] + missing_list
|
missing_list = [self.run_names_index[targetid]] + missing_list
|
||||||
|
|
||||||
bb.msg.note(1, bb.msg.domain.Provider, "Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.run_names_index[targetid], missing_list))
|
providerlog.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
|
||||||
self.failed_rdeps.append(targetid)
|
self.failed_rdeps.append(targetid)
|
||||||
dependees = self.get_rdependees(targetid)
|
dependees = self.get_rdependees(targetid)
|
||||||
for fnid in dependees:
|
for fnid in dependees:
|
||||||
|
@ -521,7 +524,7 @@ class TaskData:
|
||||||
"""
|
"""
|
||||||
Resolve all unresolved build and runtime targets
|
Resolve all unresolved build and runtime targets
|
||||||
"""
|
"""
|
||||||
bb.msg.note(1, bb.msg.domain.TaskData, "Resolving any missing task queue dependencies")
|
logger.info("Resolving any missing task queue dependencies")
|
||||||
while True:
|
while True:
|
||||||
added = 0
|
added = 0
|
||||||
for target in self.get_unresolved_build_targets(dataCache):
|
for target in self.get_unresolved_build_targets(dataCache):
|
||||||
|
@ -539,7 +542,7 @@ class TaskData:
|
||||||
added = added + 1
|
added = added + 1
|
||||||
except bb.providers.NoRProvider:
|
except bb.providers.NoRProvider:
|
||||||
self.remove_runtarget(self.getrun_id(target))
|
self.remove_runtarget(self.getrun_id(target))
|
||||||
bb.msg.debug(1, bb.msg.domain.TaskData, "Resolved " + str(added) + " extra dependencies")
|
logger.debug(1, "Resolved " + str(added) + " extra dependencies")
|
||||||
if added == 0:
|
if added == 0:
|
||||||
break
|
break
|
||||||
# self.dump_data()
|
# self.dump_data()
|
||||||
|
@ -548,40 +551,40 @@ class TaskData:
|
||||||
"""
|
"""
|
||||||
Dump some debug information on the internal data structures
|
Dump some debug information on the internal data structures
|
||||||
"""
|
"""
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:")
|
logger.debug(3, "build_names:")
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index))
|
logger.debug(3, ", ".join(self.build_names_index))
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:")
|
logger.debug(3, "run_names:")
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index))
|
logger.debug(3, ", ".join(self.run_names_index))
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:")
|
logger.debug(3, "build_targets:")
|
||||||
for buildid in range(len(self.build_names_index)):
|
for buildid in range(len(self.build_names_index)):
|
||||||
target = self.build_names_index[buildid]
|
target = self.build_names_index[buildid]
|
||||||
targets = "None"
|
targets = "None"
|
||||||
if buildid in self.build_targets:
|
if buildid in self.build_targets:
|
||||||
targets = self.build_targets[buildid]
|
targets = self.build_targets[buildid]
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (buildid, target, targets))
|
logger.debug(3, " (%s)%s: %s", buildid, target, targets)
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:")
|
logger.debug(3, "run_targets:")
|
||||||
for runid in range(len(self.run_names_index)):
|
for runid in range(len(self.run_names_index)):
|
||||||
target = self.run_names_index[runid]
|
target = self.run_names_index[runid]
|
||||||
targets = "None"
|
targets = "None"
|
||||||
if runid in self.run_targets:
|
if runid in self.run_targets:
|
||||||
targets = self.run_targets[runid]
|
targets = self.run_targets[runid]
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (runid, target, targets))
|
logger.debug(3, " (%s)%s: %s", runid, target, targets)
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:")
|
logger.debug(3, "tasks:")
|
||||||
for task in range(len(self.tasks_name)):
|
for task in range(len(self.tasks_name)):
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % (
|
logger.debug(3, " (%s)%s - %s: %s",
|
||||||
task,
|
task,
|
||||||
self.fn_index[self.tasks_fnid[task]],
|
self.fn_index[self.tasks_fnid[task]],
|
||||||
self.tasks_name[task],
|
self.tasks_name[task],
|
||||||
self.tasks_tdepends[task]))
|
self.tasks_tdepends[task])
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, "dependency ids (per fn):")
|
logger.debug(3, "dependency ids (per fn):")
|
||||||
for fnid in self.depids:
|
for fnid in self.depids:
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.depids[fnid]))
|
logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, "runtime dependency ids (per fn):")
|
logger.debug(3, "runtime dependency ids (per fn):")
|
||||||
for fnid in self.rdepids:
|
for fnid in self.rdepids:
|
||||||
bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid]))
|
logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
|
||||||
|
|
|
@ -21,11 +21,14 @@ BitBake Utility Functions
|
||||||
|
|
||||||
import re, fcntl, os, string, stat, shutil, time
|
import re, fcntl, os, string, stat, shutil, time
|
||||||
import sys
|
import sys
|
||||||
import bb
|
|
||||||
import errno
|
import errno
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
import bb.msg
|
import bb.msg
|
||||||
from commands import getstatusoutput
|
from commands import getstatusoutput
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Util")
|
||||||
|
|
||||||
# Version comparison
|
# Version comparison
|
||||||
separators = ".-"
|
separators = ".-"
|
||||||
|
|
||||||
|
@ -307,9 +310,9 @@ def _print_trace(body, line):
|
||||||
max_line = min(line + 4, len(body))
|
max_line = min(line + 4, len(body))
|
||||||
for i in range(min_line, max_line + 1):
|
for i in range(min_line, max_line + 1):
|
||||||
if line == i:
|
if line == i:
|
||||||
bb.msg.error(bb.msg.domain.Util, " *** %.4d:%s" % (i, body[i-1]) )
|
logger.error(" *** %.4d:%s" % (i, body[i-1]) )
|
||||||
else:
|
else:
|
||||||
bb.msg.error(bb.msg.domain.Util, " %.4d:%s" % (i, body[i-1]) )
|
logger.error(" %.4d:%s" % (i, body[i-1]) )
|
||||||
|
|
||||||
|
|
||||||
def better_compile(text, file, realfile, mode = "exec"):
|
def better_compile(text, file, realfile, mode = "exec"):
|
||||||
|
@ -322,16 +325,16 @@ def better_compile(text, file, realfile, mode = "exec"):
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# split the text into lines again
|
# split the text into lines again
|
||||||
body = text.split('\n')
|
body = text.split('\n')
|
||||||
bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: %s" % (realfile))
|
logger.error("Error in compiling python function in: %s" % (realfile))
|
||||||
bb.msg.error(bb.msg.domain.Util, str(e))
|
logger.error(str(e))
|
||||||
if e.lineno:
|
if e.lineno:
|
||||||
bb.msg.error(bb.msg.domain.Util, "The lines leading to this error were:")
|
logger.error("The lines leading to this error were:")
|
||||||
bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
|
logger.error("\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
|
||||||
_print_trace(body, e.lineno)
|
_print_trace(body, e.lineno)
|
||||||
else:
|
else:
|
||||||
bb.msg.error(bb.msg.domain.Util, "The function causing this error was:")
|
logger.error("The function causing this error was:")
|
||||||
for line in body:
|
for line in body:
|
||||||
bb.msg.error(bb.msg.domain.Util, line)
|
logger.error(line)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def better_exec(code, context, text, realfile = "<code>"):
|
def better_exec(code, context, text, realfile = "<code>"):
|
||||||
|
@ -351,9 +354,11 @@ def better_exec(code, context, text, realfile = "<code>"):
|
||||||
if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
|
if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
logger.exception("Error executing python function in '%s'", code.co_filename)
|
||||||
|
|
||||||
# print the Header of the Error Message
|
# print the Header of the Error Message
|
||||||
bb.msg.error(bb.msg.domain.Util, "There was an error when executing a python function in: %s" % code.co_filename)
|
logger.error("There was an error when executing a python function in: %s" % code.co_filename)
|
||||||
bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t, value))
|
logger.error("Exception:%s Message:%s" % (t, value))
|
||||||
|
|
||||||
# Strip 'us' from the stack (better_exec call)
|
# Strip 'us' from the stack (better_exec call)
|
||||||
tb = tb.tb_next
|
tb = tb.tb_next
|
||||||
|
@ -364,13 +369,13 @@ def better_exec(code, context, text, realfile = "<code>"):
|
||||||
|
|
||||||
tbextract = traceback.extract_tb(tb)
|
tbextract = traceback.extract_tb(tb)
|
||||||
tbformat = "\n".join(traceback.format_list(tbextract))
|
tbformat = "\n".join(traceback.format_list(tbextract))
|
||||||
bb.msg.error(bb.msg.domain.Util, "The stack trace of python calls that resulted in thie exception/failure was:")
|
logger.error("The stack trace of python calls that resulted in thie exception/failure was:")
|
||||||
for line in tbformat.split('\n'):
|
for line in tbformat.split('\n'):
|
||||||
bb.msg.error(bb.msg.domain.Util, line)
|
logger.error(line)
|
||||||
|
|
||||||
bb.msg.error(bb.msg.domain.Util, "The code that was being executed was:")
|
logger.error("The code that was being executed was:")
|
||||||
_print_trace(textarray, linefailed)
|
_print_trace(textarray, linefailed)
|
||||||
bb.msg.error(bb.msg.domain.Util, "(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
|
logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
|
||||||
|
|
||||||
# See if this is a function we constructed and has calls back into other functions in
|
# See if this is a function we constructed and has calls back into other functions in
|
||||||
# "text". If so, try and improve the context of the error by diving down the trace
|
# "text". If so, try and improve the context of the error by diving down the trace
|
||||||
|
@ -379,7 +384,7 @@ def better_exec(code, context, text, realfile = "<code>"):
|
||||||
while nexttb is not None:
|
while nexttb is not None:
|
||||||
if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
|
if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
|
||||||
_print_trace(textarray, tbextract[level+1][1])
|
_print_trace(textarray, tbextract[level+1][1])
|
||||||
bb.msg.error(bb.msg.domain.Util, "(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
|
logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
nexttb = tb.tb_next
|
nexttb = tb.tb_next
|
||||||
|
@ -400,11 +405,11 @@ def lockfile(name):
|
||||||
"""
|
"""
|
||||||
path = os.path.dirname(name)
|
path = os.path.dirname(name)
|
||||||
if not os.path.isdir(path):
|
if not os.path.isdir(path):
|
||||||
bb.msg.error(bb.msg.domain.Util, "Error, lockfile path does not exist!: %s" % path)
|
logger.error("Lockfile path '%s' does not exist", path)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if not os.access(path, os.W_OK):
|
if not os.access(path, os.W_OK):
|
||||||
bb.msg.error(bb.msg.domain.Util, "Error, lockfile path is not writable!: %s" % path)
|
logger.error("Error, lockfile path is not writable!: %s" % path)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
|
@ -534,7 +539,7 @@ def filter_environment(good_vars):
|
||||||
del os.environ[key]
|
del os.environ[key]
|
||||||
|
|
||||||
if len(removed_vars):
|
if len(removed_vars):
|
||||||
bb.msg.debug(1, bb.msg.domain.Util, "Removed the following variables from the environment: %s" % (", ".join(removed_vars)))
|
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars))
|
||||||
|
|
||||||
return removed_vars
|
return removed_vars
|
||||||
|
|
||||||
|
@ -604,10 +609,10 @@ def mkdirhier(dir):
|
||||||
directory already exists like os.makedirs
|
directory already exists like os.makedirs
|
||||||
"""
|
"""
|
||||||
|
|
||||||
bb.msg.debug(3, bb.msg.domain.Util, "mkdirhier(%s)" % dir)
|
logger.debug(3, "mkdirhier(%s)", dir)
|
||||||
try:
|
try:
|
||||||
os.makedirs(dir)
|
os.makedirs(dir)
|
||||||
bb.msg.debug(2, bb.msg.domain.Util, "created " + dir)
|
logger.debug(2, "created " + dir)
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if e.errno != errno.EEXIST:
|
if e.errno != errno.EEXIST:
|
||||||
raise e
|
raise e
|
||||||
|
|
Loading…
Reference in New Issue